1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "arch/arm-get-next-pcs.h"
51 #include "gdb/sim-arm.h"
54 #include "coff/internal.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as a Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
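/* For instance (illustrative only; the exact reader hook varies), a
   symbol reader that knows MSYM names Thumb code would do

       MSYMBOL_SET_SPECIAL (msym);

   while creating the minimal symbol, and arm_pc_is_thumb later tests

       if (MSYMBOL_IS_SPECIAL (sym.minsym))

   to decide that the address is in a Thumb function.  */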
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
241 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
244 /* get_next_pcs operations. */
245 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
246 arm_get_next_pcs_read_memory_unsigned_integer,
247 arm_get_next_pcs_syscall_next_pc,
248 arm_get_next_pcs_addr_bits_remove,
249 arm_get_next_pcs_is_thumb,
253 struct arm_prologue_cache
255 /* The stack pointer at the time this frame was created; i.e. the
256 caller's stack pointer when this function was called. It is used
257 to identify this frame. */
260 /* The frame base for this frame is just prev_sp - frame size.
261 FRAMESIZE is the distance from the frame pointer to the
262 initial stack pointer. */
266 /* The register used to hold the frame pointer for this frame. */
269 /* Saved register offsets. */
270 struct trad_frame_saved_reg *saved_regs;
273 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
274 CORE_ADDR prologue_start,
275 CORE_ADDR prologue_end,
276 struct arm_prologue_cache *cache);
278 /* Architecture version for displaced stepping. This affects the behaviour of
279 certain instructions, and really should not be hard-wired. */
281 #define DISPLACED_STEPPING_ARCH_VERSION 5
283 /* Set to true if the 32-bit mode is in use. */
287 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
290 arm_psr_thumb_bit (struct gdbarch *gdbarch)
292 if (gdbarch_tdep (gdbarch)->is_m)
298 /* Determine if the processor is currently executing in Thumb mode. */
301 arm_is_thumb (struct regcache *regcache)
304 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
306 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
308 return (cpsr & t_bit) != 0;
311 /* Determine if FRAME is executing in Thumb mode. */
314 arm_frame_is_thumb (struct frame_info *frame)
317 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
319 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
320 directly (from a signal frame or dummy frame) or by interpreting
321 the saved LR (from a prologue or DWARF frame). So consult it and
322 trust the unwinders. */
323 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
325 return (cpsr & t_bit) != 0;
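/* Worked example: arm_psr_thumb_bit yields the architectural T-bit mask,
   bit 5 (0x20) of the CPSR on A/R-profile cores and bit 24 (0x01000000)
   of the XPSR on M-profile cores.  With CPSR = 0x60000030 on an
   A-profile target, (cpsr & t_bit) is nonzero, so the frame is executing
   Thumb code.  */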
328 /* Callback for VEC_lower_bound. */
331 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
332 const struct arm_mapping_symbol *rhs)
334 return lhs->value < rhs->value;
337 /* Search for the mapping symbol covering MEMADDR. If one is found,
338 return its type. Otherwise, return 0. If START is non-NULL,
339 set *START to the location of the mapping symbol. */
342 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
344 struct obj_section *sec;
346 /* If there are mapping symbols, consult them. */
347 sec = find_pc_section (memaddr);
350 struct arm_per_objfile *data;
351 VEC(arm_mapping_symbol_s) *map;
352 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
356 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
357 arm_objfile_data_key);
360 map = data->section_maps[sec->the_bfd_section->index];
361 if (!VEC_empty (arm_mapping_symbol_s, map))
363 struct arm_mapping_symbol *map_sym;
365 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
366 arm_compare_mapping_symbols);
368 /* VEC_lower_bound finds the earliest ordered insertion
369 point. If the following symbol starts at this exact
370 address, we use that; otherwise, the preceding
371 mapping symbol covers this address. */
372 if (idx < VEC_length (arm_mapping_symbol_s, map))
374 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
375 if (map_sym->value == map_key.value)
378 *start = map_sym->value + obj_section_addr (sec);
379 return map_sym->type;
385 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
387 *start = map_sym->value + obj_section_addr (sec);
388 return map_sym->type;
397 /* Determine if the program counter specified in MEMADDR is in a Thumb
398 function. This function should be called for addresses unrelated to
399 any executing frame; otherwise, prefer arm_frame_is_thumb. */
402 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
404 struct bound_minimal_symbol sym;
406 struct displaced_step_closure* dsc
407 = get_displaced_step_closure_by_addr(memaddr);
409 /* If we are checking the mode of a displaced instruction in the copy area,
410 the mode should be determined by the instruction at the original address. */
414 fprintf_unfiltered (gdb_stdlog,
415 "displaced: check mode of %.8lx instead of %.8lx\n",
416 (unsigned long) dsc->insn_addr,
417 (unsigned long) memaddr);
418 memaddr = dsc->insn_addr;
421 /* If bit 0 of the address is set, assume this is a Thumb address. */
422 if (IS_THUMB_ADDR (memaddr))
425 /* Respect internal mode override if active. */
426 if (arm_override_mode != -1)
427 return arm_override_mode;
429 /* If the user wants to override the symbol table, let him. */
430 if (strcmp (arm_force_mode_string, "arm") == 0)
432 if (strcmp (arm_force_mode_string, "thumb") == 0)
435 /* ARM v6-M and v7-M are always in Thumb mode. */
436 if (gdbarch_tdep (gdbarch)->is_m)
439 /* If there are mapping symbols, consult them. */
440 type = arm_find_mapping_symbol (memaddr, NULL);
444 /* Thumb functions have a "special" bit set in minimal symbols. */
445 sym = lookup_minimal_symbol_by_pc (memaddr);
447 return (MSYMBOL_IS_SPECIAL (sym.minsym));
449 /* If the user wants to override the fallback mode, let them. */
450 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
455 /* If we couldn't find any symbol, but we're talking to a running
456 target, then trust the current value of $cpsr. This lets
457 "display/i $pc" always show the correct mode (though if there is
458 a symbol table we will not reach here, so it still may not be
459 displayed in the mode it will be executed). */
460 if (target_has_registers)
461 return arm_frame_is_thumb (get_current_frame ());
463 /* Otherwise we're out of luck; we assume ARM. */
467 /* Remove useless bits from addresses in a running program. */
469 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
471 /* On M-profile devices, do not strip the low bit from EXC_RETURN
472 (the magic exception return address). */
473 if (gdbarch_tdep (gdbarch)->is_m
474 && (val & 0xfffffff0) == 0xfffffff0)
478 return UNMAKE_THUMB_ADDR (val);
480 return (val & 0x03fffffc);
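/* Worked examples: a Thumb address such as 0x00008001 has its low bit
   stripped to 0x00008000; on a legacy 26-bit target the value is instead
   masked with 0x03fffffc.  On M-profile, a magic EXC_RETURN value such
   as 0xfffffffd matches the 0xfffffff0 test above and is left intact.  */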
483 /* Return 1 if PC is the start of a compiler helper function which
484 can be safely ignored during prologue skipping. IS_THUMB is true
485 if the function is known to be a Thumb function due to the way it
486 is being called. */
488 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
490 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
491 struct bound_minimal_symbol msym;
493 msym = lookup_minimal_symbol_by_pc (pc);
494 if (msym.minsym != NULL
495 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
496 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
498 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
500 /* The GNU linker's Thumb call stub to foo is named
502 if (strstr (name, "_from_thumb") != NULL)
505 /* On soft-float targets, __truncdfsf2 is called to convert promoted
506 arguments to their argument types in non-prototyped
508 if (startswith (name, "__truncdfsf2"))
510 if (startswith (name, "__aeabi_d2f"))
513 /* Internal functions related to thread-local storage. */
514 if (startswith (name, "__tls_get_addr"))
516 if (startswith (name, "__aeabi_read_tp"))
521 /* If we run against a stripped glibc, we may be unable to identify
522 special functions by name. Check for one important case,
523 __aeabi_read_tp, by comparing the *code* against the default
524 implementation (this is hand-written ARM assembler in glibc). */
527 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
528 == 0xe3e00a0f /* mov r0, #0xffff0fff */
529 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
530 == 0xe240f01f) /* sub pc, r0, #31 */
537 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
538 the first 16-bit of instruction, and INSN2 is the second 16-bit of
540 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
541 ((bits ((insn1), 0, 3) << 12) \
542 | (bits ((insn1), 10, 10) << 11) \
543 | (bits ((insn2), 12, 14) << 8) \
544 | bits ((insn2), 0, 7))
546 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
547 the 32-bit instruction. */
548 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
549 ((bits ((insn), 16, 19) << 12) \
550 | bits ((insn), 0, 11))
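/* A non-compiled sketch (kept under "#if 0"; the function name is only
   for illustration) showing how the macros decode "movw r0, #0x1234":
   encoding T3 splits the 16-bit immediate as imm4:i:imm3:imm8 across two
   halfwords, encoding A2 as imm4:imm12 within one word.  */
#if 0
static void
example_extract_movw_imm (void)
{
  /* Thumb-2: insn1 = 0xf241, insn2 = 0x2034 encode "movw r0, #0x1234".  */
  unsigned int t_imm = EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034);  /* 0x1234 */

  /* ARM: insn = 0xe3010234 encodes "movw r0, #0x1234".  */
  unsigned int a_imm = EXTRACT_MOVW_MOVT_IMM_A (0xe3010234);      /* 0x1234 */
}
#endif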
552 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
555 thumb_expand_immediate (unsigned int imm)
557 unsigned int count = imm >> 7;
565 return (imm & 0xff) | ((imm & 0xff) << 16);
567 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
569 return (imm & 0xff) | ((imm & 0xff) << 8)
570 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
573 return (0x80 | (imm & 0x7f)) << (32 - count);
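/* Worked examples for the 12-bit modified immediate i:imm3:imm8 decoded
   above:
     0x0ab -> 0x000000ab   (byte in the low byte only)
     0x1ab -> 0x00ab00ab   (byte in bytes 0 and 2)
     0x2ab -> 0xab00ab00   (byte in bytes 1 and 3)
     0x3ab -> 0xabababab   (byte in all four bytes)
     0x4ab -> 0x55800000   (0x80|0x2b = 0xab rotated right by 9)  */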
576 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
577 epilogue, 0 otherwise. */
580 thumb_instruction_restores_sp (unsigned short insn)
582 return (insn == 0x46bd /* mov sp, r7 */
583 || (insn & 0xff80) == 0xb000 /* add sp, imm */
584 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
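/* Worked examples: 0x46bd ("mov sp, r7"), 0xb008 ("add sp, #32") and
   0xbd10 ("pop {r4, pc}") all match the masks above; 0xb088
   ("sub sp, #32") does not, since SUB SP falls in the 0xb080-0xb0ff
   range.  */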
587 /* Analyze a Thumb prologue, looking for a recognizable stack frame
588 and frame pointer. Scan until we encounter a store that could
589 clobber the stack frame unexpectedly, or an unknown instruction.
590 Return the last address which is definitely safe to skip for an
591 initial breakpoint. */
594 thumb_analyze_prologue (struct gdbarch *gdbarch,
595 CORE_ADDR start, CORE_ADDR limit,
596 struct arm_prologue_cache *cache)
598 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
599 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
602 struct pv_area *stack;
603 struct cleanup *back_to;
605 CORE_ADDR unrecognized_pc = 0;
607 for (i = 0; i < 16; i++)
608 regs[i] = pv_register (i, 0);
609 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
610 back_to = make_cleanup_free_pv_area (stack);
612 while (start < limit)
616 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
618 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
623 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
626 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
627 whether to save LR (R14). */
628 mask = (insn & 0xff) | ((insn & 0x100) << 6);
630 /* Calculate offsets of saved R0-R7 and LR. */
631 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
632 if (mask & (1 << regno))
634 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
636 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
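/* Worked example: for insn = 0xb5f0, "push {r4-r7, lr}", the mask is
   0xf0 | (0x100 << 6) = 0x40f0, i.e. bits 4-7 (r4-r7) plus bit 14 (lr);
   the loop stores lr at SP-4, r7 at SP-8, ..., r4 at SP-20, leaving the
   tracked SP lowered by 20 bytes.  */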
639 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
641 offset = (insn & 0x7f) << 2; /* get scaled offset */
642 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
645 else if (thumb_instruction_restores_sp (insn))
647 /* Don't scan past the epilogue. */
650 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
651 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
653 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
654 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
655 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
657 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
658 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
659 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
661 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
662 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
663 && pv_is_constant (regs[bits (insn, 3, 5)]))
664 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
665 regs[bits (insn, 6, 8)]);
666 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
667 && pv_is_constant (regs[bits (insn, 3, 6)]))
669 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
670 int rm = bits (insn, 3, 6);
671 regs[rd] = pv_add (regs[rd], regs[rm]);
673 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
675 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
676 int src_reg = (insn & 0x78) >> 3;
677 regs[dst_reg] = regs[src_reg];
679 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
681 /* Handle stores to the stack. Normally pushes are used,
682 but with GCC -mtpcs-frame, there may be other stores
683 in the prologue to create the frame. */
684 int regno = (insn >> 8) & 0x7;
687 offset = (insn & 0xff) << 2;
688 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
690 if (pv_area_store_would_trash (stack, addr))
693 pv_area_store (stack, addr, 4, regs[regno]);
695 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
697 int rd = bits (insn, 0, 2);
698 int rn = bits (insn, 3, 5);
701 offset = bits (insn, 6, 10) << 2;
702 addr = pv_add_constant (regs[rn], offset);
704 if (pv_area_store_would_trash (stack, addr))
707 pv_area_store (stack, addr, 4, regs[rd]);
709 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
710 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 /* Ignore stores of argument registers to the stack. */
714 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 /* Ignore block loads from the stack, potentially copying
717 parameters from memory. */
719 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
720 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
721 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
722 /* Similarly ignore single loads from the stack. */
724 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
725 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
726 /* Skip register copies, i.e. saves to another register
727 instead of the stack. */
729 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
730 /* Recognize constant loads; even with small stacks these are necessary
732 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
733 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
735 /* Constant pool loads, for the same reason. */
736 unsigned int constant;
739 loc = start + 4 + bits (insn, 0, 7) * 4;
740 constant = read_memory_unsigned_integer (loc, 4, byte_order);
741 regs[bits (insn, 8, 10)] = pv_constant (constant);
743 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
745 unsigned short inst2;
747 inst2 = read_memory_unsigned_integer (start + 2, 2,
748 byte_order_for_code);
750 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
752 /* BL, BLX. Allow some special function calls when
753 skipping the prologue; GCC generates these before
754 storing arguments to the stack. */
756 int j1, j2, imm1, imm2;
758 imm1 = sbits (insn, 0, 10);
759 imm2 = bits (inst2, 0, 10);
760 j1 = bit (inst2, 13);
761 j2 = bit (inst2, 11);
763 offset = ((imm1 << 12) + (imm2 << 1));
764 offset ^= ((!j2) << 22) | ((!j1) << 23);
766 nextpc = start + 4 + offset;
767 /* For BLX make sure to clear the low bits. */
768 if (bit (inst2, 12) == 0)
769 nextpc = nextpc & 0xfffffffc;
771 if (!skip_prologue_function (gdbarch, nextpc,
772 bit (inst2, 12) != 0))
776 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
778 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
780 pv_t addr = regs[bits (insn, 0, 3)];
783 if (pv_area_store_would_trash (stack, addr))
786 /* Calculate offsets of saved registers. */
787 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
788 if (inst2 & (1 << regno))
790 addr = pv_add_constant (addr, -4);
791 pv_area_store (stack, addr, 4, regs[regno]);
795 regs[bits (insn, 0, 3)] = addr;
798 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
800 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
802 int regno1 = bits (inst2, 12, 15);
803 int regno2 = bits (inst2, 8, 11);
804 pv_t addr = regs[bits (insn, 0, 3)];
806 offset = inst2 & 0xff;
808 addr = pv_add_constant (addr, offset);
810 addr = pv_add_constant (addr, -offset);
812 if (pv_area_store_would_trash (stack, addr))
815 pv_area_store (stack, addr, 4, regs[regno1]);
816 pv_area_store (stack, pv_add_constant (addr, 4),
820 regs[bits (insn, 0, 3)] = addr;
823 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
824 && (inst2 & 0x0c00) == 0x0c00
825 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
827 int regno = bits (inst2, 12, 15);
828 pv_t addr = regs[bits (insn, 0, 3)];
830 offset = inst2 & 0xff;
832 addr = pv_add_constant (addr, offset);
834 addr = pv_add_constant (addr, -offset);
836 if (pv_area_store_would_trash (stack, addr))
839 pv_area_store (stack, addr, 4, regs[regno]);
842 regs[bits (insn, 0, 3)] = addr;
845 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno = bits (inst2, 12, 15);
851 offset = inst2 & 0xfff;
852 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
854 if (pv_area_store_would_trash (stack, addr))
857 pv_area_store (stack, addr, 4, regs[regno]);
860 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
861 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
862 /* Ignore stores of argument registers to the stack. */
865 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
866 && (inst2 & 0x0d00) == 0x0c00
867 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
868 /* Ignore stores of argument registers to the stack. */
871 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
873 && (inst2 & 0x8000) == 0x0000
874 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
875 /* Ignore block loads from the stack, potentially copying
876 parameters from memory. */
879 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
882 /* Similarly ignore dual loads from the stack. */
885 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
886 && (inst2 & 0x0d00) == 0x0c00
887 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
888 /* Similarly ignore single loads from the stack. */
891 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 /* Similarly ignore single loads from the stack. */
896 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
897 && (inst2 & 0x8000) == 0x0000)
899 unsigned int imm = ((bits (insn, 10, 10) << 11)
900 | (bits (inst2, 12, 14) << 8)
901 | bits (inst2, 0, 7));
903 regs[bits (inst2, 8, 11)]
904 = pv_add_constant (regs[bits (insn, 0, 3)],
905 thumb_expand_immediate (imm));
908 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
909 && (inst2 & 0x8000) == 0x0000)
911 unsigned int imm = ((bits (insn, 10, 10) << 11)
912 | (bits (inst2, 12, 14) << 8)
913 | bits (inst2, 0, 7));
915 regs[bits (inst2, 8, 11)]
916 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
919 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
920 && (inst2 & 0x8000) == 0x0000)
922 unsigned int imm = ((bits (insn, 10, 10) << 11)
923 | (bits (inst2, 12, 14) << 8)
924 | bits (inst2, 0, 7));
926 regs[bits (inst2, 8, 11)]
927 = pv_add_constant (regs[bits (insn, 0, 3)],
928 - (CORE_ADDR) thumb_expand_immediate (imm));
931 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
932 && (inst2 & 0x8000) == 0x0000)
934 unsigned int imm = ((bits (insn, 10, 10) << 11)
935 | (bits (inst2, 12, 14) << 8)
936 | bits (inst2, 0, 7));
938 regs[bits (inst2, 8, 11)]
939 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
942 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
944 unsigned int imm = ((bits (insn, 10, 10) << 11)
945 | (bits (inst2, 12, 14) << 8)
946 | bits (inst2, 0, 7));
948 regs[bits (inst2, 8, 11)]
949 = pv_constant (thumb_expand_immediate (imm));
952 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
955 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
957 regs[bits (inst2, 8, 11)] = pv_constant (imm);
960 else if (insn == 0xea5f /* mov.w Rd,Rm */
961 && (inst2 & 0xf0f0) == 0)
963 int dst_reg = (inst2 & 0x0f00) >> 8;
964 int src_reg = inst2 & 0xf;
965 regs[dst_reg] = regs[src_reg];
968 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
970 /* Constant pool loads. */
971 unsigned int constant;
974 offset = bits (inst2, 0, 11);
976 loc = start + 4 + offset;
978 loc = start + 4 - offset;
980 constant = read_memory_unsigned_integer (loc, 4, byte_order);
981 regs[bits (inst2, 12, 15)] = pv_constant (constant);
984 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
986 /* Constant pool loads. */
987 unsigned int constant;
990 offset = bits (inst2, 0, 7) << 2;
992 loc = start + 4 + offset;
994 loc = start + 4 - offset;
996 constant = read_memory_unsigned_integer (loc, 4, byte_order);
997 regs[bits (inst2, 12, 15)] = pv_constant (constant);
999 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1000 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1003 else if (thumb2_instruction_changes_pc (insn, inst2))
1005 /* Don't scan past anything that might change control flow. */
1010 /* The optimizer might shove anything into the prologue,
1011 so we just skip what we don't recognize. */
1012 unrecognized_pc = start;
1017 else if (thumb_instruction_changes_pc (insn))
1019 /* Don't scan past anything that might change control flow. */
1024 /* The optimizer might shove anything into the prologue,
1025 so we just skip what we don't recognize. */
1026 unrecognized_pc = start;
1033 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1034 paddress (gdbarch, start));
1036 if (unrecognized_pc == 0)
1037 unrecognized_pc = start;
1041 do_cleanups (back_to);
1042 return unrecognized_pc;
1045 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1047 /* Frame pointer is fp. Frame size is constant. */
1048 cache->framereg = ARM_FP_REGNUM;
1049 cache->framesize = -regs[ARM_FP_REGNUM].k;
1051 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1053 /* Frame pointer is r7. Frame size is constant. */
1054 cache->framereg = THUMB_FP_REGNUM;
1055 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1059 /* Try the stack pointer... this is a bit desperate. */
1060 cache->framereg = ARM_SP_REGNUM;
1061 cache->framesize = -regs[ARM_SP_REGNUM].k;
1064 for (i = 0; i < 16; i++)
1065 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1066 cache->saved_regs[i].addr = offset;
1068 do_cleanups (back_to);
1069 return unrecognized_pc;
1073 /* Try to analyze the instructions starting from PC, which load the symbol
1074 __stack_chk_guard. Return the address of the instruction after loading this
1075 symbol, set the destination register number in *DESTREG, and set the size of
1076 the instructions for loading the symbol in *OFFSET. Return 0 if the instructions are
1080 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1081 unsigned int *destreg, int *offset)
1083 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1084 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1085 unsigned int low, high, address;
1090 unsigned short insn1
1091 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1093 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1095 *destreg = bits (insn1, 8, 10);
1097 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1098 address = read_memory_unsigned_integer (address, 4,
1099 byte_order_for_code);
1101 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1103 unsigned short insn2
1104 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1106 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1109 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1111 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1113 /* movt Rd, #const */
1114 if ((insn1 & 0xfbc0) == 0xf2c0)
1116 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1117 *destreg = bits (insn2, 8, 11);
1119 address = (high << 16 | low);
1126 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1128 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1130 address = bits (insn, 0, 11) + pc + 8;
1131 address = read_memory_unsigned_integer (address, 4,
1132 byte_order_for_code);
1134 *destreg = bits (insn, 12, 15);
1137 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1139 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1142 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1144 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1146 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1147 *destreg = bits (insn, 12, 15);
1149 address = (high << 16 | low);
1157 /* Try to skip a sequence of instructions used for stack protector. If PC
1158 points to the first instruction of this sequence, return the address of
1159 first instruction after this sequence, otherwise, return original PC.
1161 On ARM, this sequence of instructions is composed mainly of three steps:
1162 Step 1: load symbol __stack_chk_guard,
1163 Step 2: load from address of __stack_chk_guard,
1164 Step 3: store it to somewhere else.
1166 Usually, the instructions in steps 2 and 3 are the same across ARM
1167 architectures. Step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1168 step 3 is the single instruction 'str Rx, [r7, #immd]'. However, the
1169 instructions in step 1 vary between ARM architectures. On ARMv7,
1172 movw Rn, #:lower16:__stack_chk_guard
1173 movt Rn, #:upper16:__stack_chk_guard
1180 .word __stack_chk_guard
1182 Since ldr/str are very common instructions, we can't use them alone as the
1183 'fingerprint' or 'signature' of the stack protector sequence. Instead we use
1184 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1185 stripped, as the 'fingerprint' of a stack protector code sequence. */
1188 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1190 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1191 unsigned int basereg;
1192 struct bound_minimal_symbol stack_chk_guard;
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1197 /* Try to parse the instructions in Step 1. */
1198 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1203 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1204 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1205 Otherwise, this sequence cannot be for the stack protector. */
1206 if (stack_chk_guard.minsym == NULL
1207 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1212 unsigned int destreg;
1214 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1216 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1217 if ((insn & 0xf800) != 0x6800)
1219 if (bits (insn, 3, 5) != basereg)
1221 destreg = bits (insn, 0, 2);
1223 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1224 byte_order_for_code);
1225 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1226 if ((insn & 0xf800) != 0x6000)
1228 if (destreg != bits (insn, 0, 2))
1233 unsigned int destreg;
1235 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1237 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1238 if ((insn & 0x0e500000) != 0x04100000)
1240 if (bits (insn, 16, 19) != basereg)
1242 destreg = bits (insn, 12, 15);
1243 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1244 insn = read_memory_unsigned_integer (pc + offset + 4,
1245 4, byte_order_for_code);
1246 if ((insn & 0x0e500000) != 0x04000000)
1248 if (bits (insn, 12, 15) != destreg)
1251 /* The total size of the two ldr/str instructions is 4 on Thumb-2 and 8 otherwise. */
1254 return pc + offset + 4;
1256 return pc + offset + 8;
1259 /* Advance the PC across any function entry prologue instructions to
1260 reach some "real" code.
1262 The APCS (ARM Procedure Call Standard) defines the following
1266 [stmfd sp!, {a1,a2,a3,a4}]
1267 stmfd sp!, {...,fp,ip,lr,pc}
1268 [stfe f7, [sp, #-12]!]
1269 [stfe f6, [sp, #-12]!]
1270 [stfe f5, [sp, #-12]!]
1271 [stfe f4, [sp, #-12]!]
1272 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1275 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1277 CORE_ADDR func_addr, limit_pc;
1279 /* See if we can determine the end of the prologue via the symbol table.
1280 If so, then return either PC, or the PC after the prologue, whichever
1282 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1284 CORE_ADDR post_prologue_pc
1285 = skip_prologue_using_sal (gdbarch, func_addr);
1286 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1288 if (post_prologue_pc)
1290 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1293 /* GCC always emits a line note before the prologue and another
1294 one after, even if the two are at the same address or on the
1295 same line. Take advantage of this so that we do not need to
1296 know every instruction that might appear in the prologue. We
1297 will have producer information for most binaries; if it is
1298 missing (e.g. for -gstabs), assume the GNU tools. */
1299 if (post_prologue_pc
1301 || COMPUNIT_PRODUCER (cust) == NULL
1302 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1303 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1304 return post_prologue_pc;
1306 if (post_prologue_pc != 0)
1308 CORE_ADDR analyzed_limit;
1310 /* For non-GCC compilers, make sure the entire line is an
1311 acceptable prologue; GDB will round this function's
1312 return value up to the end of the following line so we
1313 can not skip just part of a line (and we do not want to).
1315 RealView does not treat the prologue specially, but does
1316 associate prologue code with the opening brace; so this
1317 lets us skip the first line if we think it is the opening
1319 if (arm_pc_is_thumb (gdbarch, func_addr))
1320 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1321 post_prologue_pc, NULL);
1323 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1324 post_prologue_pc, NULL);
1326 if (analyzed_limit != post_prologue_pc)
1329 return post_prologue_pc;
1333 /* Can't determine prologue from the symbol table, need to examine
1336 /* Find an upper limit on the function prologue using the debug
1337 information. If the debug information could not be used to provide
1338 that bound, then use an arbitrary large number as the upper bound. */
1339 /* Like arm_scan_prologue, stop no later than pc + 64. */
1340 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1342 limit_pc = pc + 64; /* Magic. */
1345 /* Check if this is Thumb code. */
1346 if (arm_pc_is_thumb (gdbarch, pc))
1347 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1349 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1353 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1354 This function decodes a Thumb function prologue to determine:
1355 1) the size of the stack frame
1356 2) which registers are saved on it
1357 3) the offsets of saved regs
1358 4) the offset from the stack pointer to the frame pointer
1360 A typical Thumb function prologue would create this stack frame
1361 (offsets relative to FP)
1362 old SP -> 24 stack parameters
1365 R7 -> 0 local variables (16 bytes)
1366 SP -> -12 additional stack space (12 bytes)
1367 The frame size would thus be 36 bytes, and the frame offset would be
1368 12 bytes. The frame register is R7.
1370 The comments for thumb_skip_prolog() describe the algorithm we use
1371 to detect the end of the prolog. */
1375 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1376 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1378 CORE_ADDR prologue_start;
1379 CORE_ADDR prologue_end;
1381 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1384 /* See comment in arm_scan_prologue for an explanation of
1386 if (prologue_end > prologue_start + 64)
1388 prologue_end = prologue_start + 64;
1392 /* We're in the boondocks: we have no idea where the start of the
1393 function is. */
1396 prologue_end = min (prologue_end, prev_pc);
1398 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1401 /* Return 1 if the ARM instruction INSN restores SP in the epilogue,
1402 0 otherwise. */
1405 arm_instruction_restores_sp (unsigned int insn)
1407 if (bits (insn, 28, 31) != INST_NV)
1409 if ((insn & 0x0df0f000) == 0x0080d000
1410 /* ADD SP (register or immediate). */
1411 || (insn & 0x0df0f000) == 0x0040d000
1412 /* SUB SP (register or immediate). */
1413 || (insn & 0x0ffffff0) == 0x01a0d000
1415 || (insn & 0x0fff0000) == 0x08bd0000
1417 || (insn & 0x0fff0000) == 0x049d0000)
1418 /* POP of a single register. */
1425 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1426 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1427 fill it in. Return the first address not recognized as a prologue
1430 We recognize all the instructions typically found in ARM prologues,
1431 plus harmless instructions which can be skipped (either for analysis
1432 purposes, or a more restrictive set that can be skipped when finding
1433 the end of the prologue). */
1436 arm_analyze_prologue (struct gdbarch *gdbarch,
1437 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1438 struct arm_prologue_cache *cache)
1440 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1442 CORE_ADDR offset, current_pc;
1443 pv_t regs[ARM_FPS_REGNUM];
1444 struct pv_area *stack;
1445 struct cleanup *back_to;
1446 CORE_ADDR unrecognized_pc = 0;
1448 /* Search the prologue looking for instructions that set up the
1449 frame pointer, adjust the stack pointer, and save registers.
1451 Be careful, however, and if it doesn't look like a prologue,
1452 don't try to scan it. If, for instance, a frameless function
1453 begins with stmfd sp!, then we will tell ourselves there is
1454 a frame, which will confuse stack traceback, as well as "finish"
1455 and other operations that rely on a knowledge of the stack
1458 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1459 regs[regno] = pv_register (regno, 0);
1460 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1461 back_to = make_cleanup_free_pv_area (stack);
1463 for (current_pc = prologue_start;
1464 current_pc < prologue_end;
1468 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1470 if (insn == 0xe1a0c00d) /* mov ip, sp */
1472 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1475 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1476 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1478 unsigned imm = insn & 0xff; /* immediate value */
1479 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1480 int rd = bits (insn, 12, 15);
1481 imm = (imm >> rot) | (imm << (32 - rot));
1482 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1485 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1486 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1488 unsigned imm = insn & 0xff; /* immediate value */
1489 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1490 int rd = bits (insn, 12, 15);
1491 imm = (imm >> rot) | (imm << (32 - rot));
1492 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1495 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1498 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1500 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1501 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1502 regs[bits (insn, 12, 15)]);
1505 else if ((insn & 0xffff0000) == 0xe92d0000)
1506 /* stmfd sp!, {..., fp, ip, lr, pc}
1508 stmfd sp!, {a1, a2, a3, a4} */
1510 int mask = insn & 0xffff;
1512 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1515 /* Calculate offsets of saved registers. */
1516 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1517 if (mask & (1 << regno))
1520 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1521 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1524 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1525 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1526 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1528 /* No need to add this to saved_regs -- it's just an arg reg. */
1531 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1532 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1533 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1535 /* No need to add this to saved_regs -- it's just an arg reg. */
1538 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1540 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1542 /* No need to add this to saved_regs -- it's just arg regs. */
1545 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1547 unsigned imm = insn & 0xff; /* immediate value */
1548 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1549 imm = (imm >> rot) | (imm << (32 - rot));
1550 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1552 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1554 unsigned imm = insn & 0xff; /* immediate value */
1555 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1556 imm = (imm >> rot) | (imm << (32 - rot));
1557 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
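/* Worked example: "sub sp, sp, #4096" is 0xe24dda01, so imm = 0x01 and
   the rotate amount is (0xa00 >> 7) = 20; the rotation yields
   (1 >> 20) | (1 << 12) = 0x1000, and the tracked SP drops by 4096.  */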
1559 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1561 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1563 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1566 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1567 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1568 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1570 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1572 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1574 int n_saved_fp_regs;
1575 unsigned int fp_start_reg, fp_bound_reg;
1577 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1580 if ((insn & 0x800) == 0x800) /* N0 is set */
1582 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1583 n_saved_fp_regs = 3;
1585 n_saved_fp_regs = 1;
1589 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1590 n_saved_fp_regs = 2;
1592 n_saved_fp_regs = 4;
1595 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1596 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1597 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1599 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1600 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1601 regs[fp_start_reg++]);
1604 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1606 /* Allow some special function calls when skipping the
1607 prologue; GCC generates these before storing arguments to
1609 CORE_ADDR dest = BranchDest (current_pc, insn);
1611 if (skip_prologue_function (gdbarch, dest, 0))
1616 else if ((insn & 0xf0000000) != 0xe0000000)
1617 break; /* Condition not true, exit early. */
1618 else if (arm_instruction_changes_pc (insn))
1619 /* Don't scan past anything that might change control flow. */
1621 else if (arm_instruction_restores_sp (insn))
1623 /* Don't scan past the epilogue. */
1626 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1627 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1628 /* Ignore block loads from the stack, potentially copying
1629 parameters from memory. */
1631 else if ((insn & 0xfc500000) == 0xe4100000
1632 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1633 /* Similarly ignore single loads from the stack. */
1635 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1636 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1637 register instead of the stack. */
1641 /* The optimizer might shove anything into the prologue. If we are
1642 building up the cache (cache != NULL) from scanning the prologue, we
1643 just skip what we don't recognize and scan further to make the cache
1644 as complete as possible. However, if we are merely skipping the
1645 prologue, we stop immediately on an unrecognized
1646 instruction. */
1647 unrecognized_pc = current_pc;
1655 if (unrecognized_pc == 0)
1656 unrecognized_pc = current_pc;
1660 int framereg, framesize;
1662 /* The frame size is just the distance from the frame register
1663 to the original stack pointer. */
1664 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1666 /* Frame pointer is fp. */
1667 framereg = ARM_FP_REGNUM;
1668 framesize = -regs[ARM_FP_REGNUM].k;
1672 /* Try the stack pointer... this is a bit desperate. */
1673 framereg = ARM_SP_REGNUM;
1674 framesize = -regs[ARM_SP_REGNUM].k;
1677 cache->framereg = framereg;
1678 cache->framesize = framesize;
1680 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1681 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1682 cache->saved_regs[regno].addr = offset;
1686 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1687 paddress (gdbarch, unrecognized_pc));
1689 do_cleanups (back_to);
1690 return unrecognized_pc;
1694 arm_scan_prologue (struct frame_info *this_frame,
1695 struct arm_prologue_cache *cache)
1697 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1698 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1699 CORE_ADDR prologue_start, prologue_end;
1700 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1701 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1703 /* Assume there is no frame until proven otherwise. */
1704 cache->framereg = ARM_SP_REGNUM;
1705 cache->framesize = 0;
1707 /* Check for Thumb prologue. */
1708 if (arm_frame_is_thumb (this_frame))
1710 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1714 /* Find the function prologue. If we can't find the function in
1715 the symbol table, peek in the stack frame to find the PC. */
1716 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1719 /* One way to find the end of the prologue (which works well
1720 for unoptimized code) is to do the following:
1722 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1725 prologue_end = prev_pc;
1726 else if (sal.end < prologue_end)
1727 prologue_end = sal.end;
1729 This mechanism is very accurate so long as the optimizer
1730 doesn't move any instructions from the function body into the
1731 prologue. If this happens, sal.end will be the last
1732 instruction in the first hunk of prologue code just before
1733 the first instruction that the scheduler has moved from
1734 the body to the prologue.
1736 In order to make sure that we scan all of the prologue
1737 instructions, we use a slightly less accurate mechanism which
1738 may scan more than necessary. To help compensate for this
1739 lack of accuracy, the prologue scanning loop below contains
1740 several clauses which'll cause the loop to terminate early if
1741 an implausible prologue instruction is encountered.
1747 is a suitable endpoint since it accounts for the largest
1748 possible prologue plus up to five instructions inserted by
1751 if (prologue_end > prologue_start + 64)
1753 prologue_end = prologue_start + 64; /* See above. */
1758 /* We have no symbol information. Our only option is to assume this
1759 function has a standard stack frame and the normal frame register.
1760 Then, we can find the value of our frame pointer on entrance to
1761 the callee (or at the present moment if this is the innermost frame).
1762 The value stored there should be the address of the stmfd + 8. */
1763 CORE_ADDR frame_loc;
1764 LONGEST return_value;
1766 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1767 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1771 prologue_start = gdbarch_addr_bits_remove
1772 (gdbarch, return_value) - 8;
1773 prologue_end = prologue_start + 64; /* See above. */
1777 if (prev_pc < prologue_end)
1778 prologue_end = prev_pc;
1780 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1783 static struct arm_prologue_cache *
1784 arm_make_prologue_cache (struct frame_info *this_frame)
1787 struct arm_prologue_cache *cache;
1788 CORE_ADDR unwound_fp;
1790 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1791 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1793 arm_scan_prologue (this_frame, cache);
1795 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1796 if (unwound_fp == 0)
1799 cache->prev_sp = unwound_fp + cache->framesize;
1801 /* Calculate actual addresses of saved registers using offsets
1802 determined by arm_scan_prologue. */
1803 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1804 if (trad_frame_addr_p (cache->saved_regs, reg))
1805 cache->saved_regs[reg].addr += cache->prev_sp;
1810 /* Implementation of the stop_reason hook for arm_prologue frames. */
1812 static enum unwind_stop_reason
1813 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1816 struct arm_prologue_cache *cache;
1819 if (*this_cache == NULL)
1820 *this_cache = arm_make_prologue_cache (this_frame);
1821 cache = (struct arm_prologue_cache *) *this_cache;
1823 /* This is meant to halt the backtrace at "_start". */
1824 pc = get_frame_pc (this_frame);
1825 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1826 return UNWIND_OUTERMOST;
1828 /* If we've hit a wall, stop. */
1829 if (cache->prev_sp == 0)
1830 return UNWIND_OUTERMOST;
1832 return UNWIND_NO_REASON;
1835 /* Our frame ID for a normal frame is the current function's starting PC
1836 and the caller's SP when we were called. */
1839 arm_prologue_this_id (struct frame_info *this_frame,
1841 struct frame_id *this_id)
1843 struct arm_prologue_cache *cache;
1847 if (*this_cache == NULL)
1848 *this_cache = arm_make_prologue_cache (this_frame);
1849 cache = (struct arm_prologue_cache *) *this_cache;
1851 /* Use function start address as part of the frame ID. If we cannot
1852 identify the start address (due to missing symbol information),
1853 fall back to just using the current PC. */
1854 pc = get_frame_pc (this_frame);
1855 func = get_frame_func (this_frame);
1859 id = frame_id_build (cache->prev_sp, func);
1863 static struct value *
1864 arm_prologue_prev_register (struct frame_info *this_frame,
1868 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1869 struct arm_prologue_cache *cache;
1871 if (*this_cache == NULL)
1872 *this_cache = arm_make_prologue_cache (this_frame);
1873 cache = (struct arm_prologue_cache *) *this_cache;
1875 /* If we are asked to unwind the PC, then we need to return the LR
1876 instead. The prologue may save PC, but it will point into this
1877 frame's prologue, not the next frame's resume location. Also
1878 strip the saved T bit. A valid LR may have the low bit set, but
1879 a valid PC never does. */
1880 if (prev_regnum == ARM_PC_REGNUM)
1884 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1885 return frame_unwind_got_constant (this_frame, prev_regnum,
1886 arm_addr_bits_remove (gdbarch, lr));
1889 /* SP is generally not saved to the stack, but this frame is
1890 identified by the next frame's stack pointer at the time of the call.
1891 The value was already reconstructed into PREV_SP. */
1892 if (prev_regnum == ARM_SP_REGNUM)
1893 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1895 /* The CPSR may have been changed by the call instruction and by the
1896 called function. The only bit we can reconstruct is the T bit,
1897 by checking the low bit of LR as of the call. This is a reliable
1898 indicator of Thumb-ness except for some ARM v4T pre-interworking
1899 Thumb code, which could get away with a clear low bit as long as
1900 the called function did not use bx. Guess that all other
1901 bits are unchanged; the condition flags are presumably lost,
1902 but the processor status is likely valid. */
1903 if (prev_regnum == ARM_PS_REGNUM)
1906 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1908 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1909 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1910 if (IS_THUMB_ADDR (lr))
1914 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1917 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1921 struct frame_unwind arm_prologue_unwind = {
1923 arm_prologue_unwind_stop_reason,
1924 arm_prologue_this_id,
1925 arm_prologue_prev_register,
1927 default_frame_sniffer
1930 /* Maintain a list of ARM exception table entries per objfile, similar to the
1931 list of mapping symbols. We only cache entries for standard ARM-defined
1932 personality routines; the cache will contain only the frame unwinding
1933 instructions associated with the entry (not the descriptors). */
1935 static const struct objfile_data *arm_exidx_data_key;
1937 struct arm_exidx_entry
1942 typedef struct arm_exidx_entry arm_exidx_entry_s;
1943 DEF_VEC_O(arm_exidx_entry_s);
1945 struct arm_exidx_data
1947 VEC(arm_exidx_entry_s) **section_maps;
1951 arm_exidx_data_free (struct objfile *objfile, void *arg)
1953 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1956 for (i = 0; i < objfile->obfd->section_count; i++)
1957 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1961 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1962 const struct arm_exidx_entry *rhs)
1964 return lhs->addr < rhs->addr;
1967 static struct obj_section *
1968 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1970 struct obj_section *osect;
1972 ALL_OBJFILE_OSECTIONS (objfile, osect)
1973 if (bfd_get_section_flags (objfile->obfd,
1974 osect->the_bfd_section) & SEC_ALLOC)
1976 bfd_vma start, size;
1977 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1978 size = bfd_get_section_size (osect->the_bfd_section);
1980 if (start <= vma && vma < start + size)
1987 /* Parse contents of exception table and exception index sections
1988 of OBJFILE, and fill in the exception table entry cache.
1990 For each entry that refers to a standard ARM-defined personality
1991 routine, extract the frame unwinding instructions (from either
1992 the index or the table section). The unwinding instructions
1993 are normalized by:
1994 - extracting them from the rest of the table data
1995 - converting to host endianness
1996 - appending the implicit 0xb0 ("Finish") code
1998 The extracted and normalized instructions are stored for later
1999 retrieval by the arm_find_exidx_entry routine. */
2002 arm_exidx_new_objfile (struct objfile *objfile)
2004 struct cleanup *cleanups;
2005 struct arm_exidx_data *data;
2006 asection *exidx, *extab;
2007 bfd_vma exidx_vma = 0, extab_vma = 0;
2008 bfd_size_type exidx_size = 0, extab_size = 0;
2009 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2012 /* If we've already touched this file, do nothing. */
2013 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2015 cleanups = make_cleanup (null_cleanup, NULL);
2017 /* Read contents of exception table and index. */
2018 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2021 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2022 exidx_size = bfd_get_section_size (exidx);
2023 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2024 make_cleanup (xfree, exidx_data);
2026 if (!bfd_get_section_contents (objfile->obfd, exidx,
2027 exidx_data, 0, exidx_size))
2029 do_cleanups (cleanups);
2034 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2037 extab_vma = bfd_section_vma (objfile->obfd, extab);
2038 extab_size = bfd_get_section_size (extab);
2039 extab_data = (gdb_byte *) xmalloc (extab_size);
2040 make_cleanup (xfree, extab_data);
2042 if (!bfd_get_section_contents (objfile->obfd, extab,
2043 extab_data, 0, extab_size))
2045 do_cleanups (cleanups);
2050 /* Allocate exception table data structure. */
2051 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2052 set_objfile_data (objfile, arm_exidx_data_key, data);
2053 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2054 objfile->obfd->section_count,
2055 VEC(arm_exidx_entry_s) *);
2057 /* Fill in exception table. */
2058 for (i = 0; i < exidx_size / 8; i++)
2060 struct arm_exidx_entry new_exidx_entry;
2061 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2062 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2063 bfd_vma addr = 0, word = 0;
2064 int n_bytes = 0, n_words = 0;
2065 struct obj_section *sec;
2066 gdb_byte *entry = NULL;
2068 /* Extract address of start of function. */
2069 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2070 idx += exidx_vma + i * 8;
2072 /* Find section containing function and compute section offset. */
2073 sec = arm_obj_section_from_vma (objfile, idx);
2076 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2078 /* Determine address of exception table entry. */
2081 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2083 else if ((val & 0xff000000) == 0x80000000)
2085 /* Exception table entry embedded in .ARM.exidx
2086 -- must be short form. */
2090 else if (!(val & 0x80000000))
2092 /* Exception table entry in .ARM.extab. */
2093 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2094 addr += exidx_vma + i * 8 + 4;
2096 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2098 word = bfd_h_get_32 (objfile->obfd,
2099 extab_data + addr - extab_vma);
2102 if ((word & 0xff000000) == 0x80000000)
2107 else if ((word & 0xff000000) == 0x81000000
2108 || (word & 0xff000000) == 0x82000000)
2112 n_words = ((word >> 16) & 0xff);
2114 else if (!(word & 0x80000000))
2117 struct obj_section *pers_sec;
2118 int gnu_personality = 0;
2120 /* Custom personality routine. */
2121 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2122 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2124 /* Check whether we've got one of the variants of the
2125 GNU personality routines. */
2126 pers_sec = arm_obj_section_from_vma (objfile, pers);
2129 static const char *personality[] =
2131 "__gcc_personality_v0",
2132 "__gxx_personality_v0",
2133 "__gcj_personality_v0",
2134 "__gnu_objc_personality_v0",
2138 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2141 for (k = 0; personality[k]; k++)
2142 if (lookup_minimal_symbol_by_pc_name
2143 (pc, personality[k], objfile))
2145 gnu_personality = 1;
2150 /* If so, the next word contains a word count in the high
2151 byte, followed by the same unwind instructions as the
2152 pre-defined forms. */
2154 && addr + 4 <= extab_vma + extab_size)
2156 word = bfd_h_get_32 (objfile->obfd,
2157 extab_data + addr - extab_vma);
2160 n_words = ((word >> 24) & 0xff);
2166 /* Sanity check address. */
2168 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2169 n_words = n_bytes = 0;
2171 /* The unwind instructions reside in WORD (only the N_BYTES least
2172 significant bytes are valid), followed by N_WORDS words in the
2173 extab section starting at ADDR. */
2174 if (n_bytes || n_words)
2177 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2178 n_bytes + n_words * 4 + 1);
2181 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2185 word = bfd_h_get_32 (objfile->obfd,
2186 extab_data + addr - extab_vma);
2189 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2190 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2191 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2192 *p++ = (gdb_byte) (word & 0xff);
2195 /* Implied "Finish" to terminate the list. */
2199 /* Push entry onto vector. They are guaranteed to always
2200 appear in order of increasing addresses. */
2201 new_exidx_entry.addr = idx;
2202 new_exidx_entry.entry = entry;
2203 VEC_safe_push (arm_exidx_entry_s,
2204 data->section_maps[sec->the_bfd_section->index],
2208 do_cleanups (cleanups);
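/* Illustrative aside (not part of the original code): the decodes of
   IDX, ADDR and PERS in arm_exidx_new_objfile above all use the
   "prel31" convention from the ARM EHABI: the low 31 bits of the word
   hold a place-relative offset that must be sign-extended from bit 30.
   For example, a stored word of 0x7ffffffe denotes an offset of -2
   bytes from the word's own address, and the
   ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000 idiom recovers exactly
   that value. */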
2211 /* Search for the exception table entry covering MEMADDR. If one is found,
2212 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2213 set *START to the start of the region covered by this entry. */
2216 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2218 struct obj_section *sec;
2220 sec = find_pc_section (memaddr);
2223 struct arm_exidx_data *data;
2224 VEC(arm_exidx_entry_s) *map;
2225 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2228 data = ((struct arm_exidx_data *)
2229 objfile_data (sec->objfile, arm_exidx_data_key));
2232 map = data->section_maps[sec->the_bfd_section->index];
2233 if (!VEC_empty (arm_exidx_entry_s, map))
2235 struct arm_exidx_entry *map_sym;
2237 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2238 arm_compare_exidx_entries);
2240 /* VEC_lower_bound finds the earliest ordered insertion
2241 point. If the following symbol starts at this exact
2242 address, we use that; otherwise, the preceding
2243 exception table entry covers this address. */
2244 if (idx < VEC_length (arm_exidx_entry_s, map))
2246 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2247 if (map_sym->addr == map_key.addr)
2250 *start = map_sym->addr + obj_section_addr (sec);
2251 return map_sym->entry;
2257 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2259 *start = map_sym->addr + obj_section_addr (sec);
2260 return map_sym->entry;
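/* Worked example with hypothetical values: if a section's map holds
   entries at offsets 0x0 and 0x40, a lookup for offset 0x10 makes
   VEC_lower_bound return index 1 (the 0x40 entry); its address does
   not match the key, so the code above steps back to index 0 and
   returns the entry whose region starts at 0x0. */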
2269 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2270 instruction list from the ARM exception table entry ENTRY, allocate and
2271 return a prologue cache structure describing how to unwind this frame.
2273 Return NULL if the unwinding instruction list contains a "spare",
2274 "reserved" or "refuse to unwind" instruction as defined in section
2275 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2276 for the ARM Architecture" document. */
2278 static struct arm_prologue_cache *
2279 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2284 struct arm_prologue_cache *cache;
2285 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2286 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2292 /* Whenever we reload SP, we have to retrieve its actual
2293 value in the current frame. */
2296 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2298 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2299 vsp = get_frame_register_unsigned (this_frame, reg);
2303 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2304 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2310 /* Decode next unwind instruction. */
2313 if ((insn & 0xc0) == 0)
2315 int offset = insn & 0x3f;
2316 vsp += (offset << 2) + 4;
2318 else if ((insn & 0xc0) == 0x40)
2320 int offset = insn & 0x3f;
2321 vsp -= (offset << 2) + 4;
2323 else if ((insn & 0xf0) == 0x80)
2325 int mask = ((insn & 0xf) << 8) | *entry++;
2328 /* The special case of an all-zero mask identifies
2329 "Refuse to unwind". We return NULL to fall back
2330 to the prologue analyzer. */
2334 /* Pop registers r4..r15 under mask. */
2335 for (i = 0; i < 12; i++)
2336 if (mask & (1 << i))
2338 cache->saved_regs[4 + i].addr = vsp;
2342 /* Special-case popping SP -- we need to reload vsp. */
2343 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2346 else if ((insn & 0xf0) == 0x90)
2348 int reg = insn & 0xf;
2350 /* Reserved cases. */
2351 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2354 /* Set SP from another register and mark VSP for reload. */
2355 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2358 else if ((insn & 0xf0) == 0xa0)
2360 int count = insn & 0x7;
2361 int pop_lr = (insn & 0x8) != 0;
2364 /* Pop r4..r[4+count]. */
2365 for (i = 0; i <= count; i++)
2367 cache->saved_regs[4 + i].addr = vsp;
2371 /* If indicated by flag, pop LR as well. */
2374 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2378 else if (insn == 0xb0)
2380 /* We could only have updated PC by popping into it; if so, it
2381 will show up as an address. Otherwise, copy LR into PC. */
2382 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2383 cache->saved_regs[ARM_PC_REGNUM]
2384 = cache->saved_regs[ARM_LR_REGNUM];
2389 else if (insn == 0xb1)
2391 int mask = *entry++;
2394 /* All-zero mask and mask >= 16 is "spare". */
2395 if (mask == 0 || mask >= 16)
2398 /* Pop r0..r3 under mask. */
2399 for (i = 0; i < 4; i++)
2400 if (mask & (1 << i))
2402 cache->saved_regs[i].addr = vsp;
2406 else if (insn == 0xb2)
2408 ULONGEST offset = 0;
2413 offset |= (*entry & 0x7f) << shift;
2416 while (*entry++ & 0x80);
2418 vsp += 0x204 + (offset << 2);
2420 else if (insn == 0xb3)
2422 int start = *entry >> 4;
2423 int count = (*entry++) & 0xf;
2426 /* Only registers D0..D15 are valid here. */
2427 if (start + count >= 16)
2430 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2431 for (i = 0; i <= count; i++)
2433 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2437 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2440 else if ((insn & 0xf8) == 0xb8)
2442 int count = insn & 0x7;
2445 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2446 for (i = 0; i <= count; i++)
2448 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2452 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2455 else if (insn == 0xc6)
2457 int start = *entry >> 4;
2458 int count = (*entry++) & 0xf;
2461 /* Only registers WR0..WR15 are valid. */
2462 if (start + count >= 16)
2465 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2466 for (i = 0; i <= count; i++)
2468 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2472 else if (insn == 0xc7)
2474 int mask = *entry++;
2477 /* All-zero mask and mask >= 16 is "spare". */
2478 if (mask == 0 || mask >= 16)
2481 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2482 for (i = 0; i < 4; i++)
2483 if (mask & (1 << i))
2485 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2489 else if ((insn & 0xf8) == 0xc0)
2491 int count = insn & 0x7;
2494 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2495 for (i = 0; i <= count; i++)
2497 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2501 else if (insn == 0xc8)
2503 int start = *entry >> 4;
2504 int count = (*entry++) & 0xf;
2507 /* Only registers D0..D31 are valid. */
2508 if (start + count >= 16)
2511 /* Pop VFP double-precision registers
2512 D[16+start]..D[16+start+count]. */
2513 for (i = 0; i <= count; i++)
2515 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2519 else if (insn == 0xc9)
2521 int start = *entry >> 4;
2522 int count = (*entry++) & 0xf;
2525 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2526 for (i = 0; i <= count; i++)
2528 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2532 else if ((insn & 0xf8) == 0xd0)
2534 int count = insn & 0x7;
2537 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2538 for (i = 0; i <= count; i++)
2540 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2546 /* Everything else is "spare". */
2551 /* If we restore SP from a register, assume this was the frame register.
2552 Otherwise just fall back to SP as frame register. */
2553 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2554 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2556 cache->framereg = ARM_SP_REGNUM;
2558 /* Determine offset to previous frame. */
2560 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2562 /* We already got the previous SP. */
2563 cache->prev_sp = vsp;
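/* Worked example (illustrative): an entry whose opcodes are 0xa8 0xb0
   describes a frame built by "push {r4, lr}". Run through the decoder
   above, 0xa8 records r4 and then LR as saved at VSP (advancing VSP by
   8 in total), and 0xb0 ("Finish") copies LR into PC and ends the
   sequence, so PREV_SP ends up 8 bytes above this frame's SP. */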
2568 /* Unwinding via ARM exception table entries. Note that the sniffer
2569 already computes a filled-in prologue cache, which is then used
2570 with the same arm_prologue_this_id and arm_prologue_prev_register
2571 routines also used for prologue-parsing based unwinding. */
2574 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2575 struct frame_info *this_frame,
2576 void **this_prologue_cache)
2578 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2579 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2580 CORE_ADDR addr_in_block, exidx_region, func_start;
2581 struct arm_prologue_cache *cache;
2584 /* See if we have an ARM exception table entry covering this address. */
2585 addr_in_block = get_frame_address_in_block (this_frame);
2586 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2590 /* The ARM exception table does not describe unwind information
2591 for arbitrary PC values, but is guaranteed to be correct only
2592 at call sites. We have to decide here whether we want to use
2593 ARM exception table information for this frame, or fall back
2594 to using prologue parsing. (Note that if we have DWARF CFI,
2595 this sniffer isn't even called -- CFI is always preferred.)
2597 Before we make this decision, however, we check whether we
2598 actually have *symbol* information for the current frame.
2599 If not, prologue parsing would not work anyway, so we might
2600 as well use the exception table and hope for the best. */
2601 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2605 /* If the next frame is "normal", we are at a call site in this
2606 frame, so exception information is guaranteed to be valid. */
2607 if (get_next_frame (this_frame)
2608 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2611 /* We also assume exception information is valid if we're currently
2612 blocked in a system call. The system library is supposed to
2613 ensure this, so that e.g. pthread cancellation works. */
2614 if (arm_frame_is_thumb (this_frame))
2618 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2619 byte_order_for_code, &insn)
2620 && (insn & 0xff00) == 0xdf00 /* svc */)
2627 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2628 byte_order_for_code, &insn)
2629 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2633 /* Bail out if we don't know that exception information is valid. */
2637 /* The ARM exception index does not mark the *end* of the region
2638 covered by the entry, and some functions will not have any entry.
2639 To correctly recognize the end of the covered region, the linker
2640 should have inserted dummy records with a CANTUNWIND marker.
2642 Unfortunately, current versions of GNU ld do not reliably do
2643 this, and thus we may have found an incorrect entry above.
2644 As a (temporary) sanity check, we only use the entry if it
2645 lies *within* the bounds of the function. Note that this check
2646 might reject perfectly valid entries that just happen to cover
2647 multiple functions; therefore this check ought to be removed
2648 once the linker is fixed. */
2649 if (func_start > exidx_region)
2653 /* Decode the list of unwinding instructions into a prologue cache.
2654 Note that this may fail due to e.g. a "refuse to unwind" code. */
2655 cache = arm_exidx_fill_cache (this_frame, entry);
2659 *this_prologue_cache = cache;
2663 struct frame_unwind arm_exidx_unwind = {
2665 default_frame_unwind_stop_reason,
2666 arm_prologue_this_id,
2667 arm_prologue_prev_register,
2669 arm_exidx_unwind_sniffer
2672 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2673 trampoline, return the target PC. Otherwise return 0.
2675 void call0a (char c, short s, int i, long l) {}
2679 (*pointer_to_call0a) (c, s, i, l);
2682 Instead of calling a stub library function _call_via_xx (xx is
2683 the register name), GCC may inline the trampoline in the object
2684 file as below (register r2 has the address of call0a).
2687 .type main, %function
2696 The trampoline 'bx r2' doesn't belong to main. */
2699 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2701 /* The heuristic for recognizing such a trampoline is that FRAME is
2702 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2703 if (arm_frame_is_thumb (frame))
2707 if (target_read_memory (pc, buf, 2) == 0)
2709 struct gdbarch *gdbarch = get_frame_arch (frame);
2710 enum bfd_endian byte_order_for_code
2711 = gdbarch_byte_order_for_code (gdbarch);
2713 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2715 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2718 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2720 /* Clear the LSB so that gdb core sets step-resume
2721 breakpoint at the right address. */
2722 return UNMAKE_THUMB_ADDR (dest);
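/* For illustration: the inlined trampoline "bx r2" assembles to the
   Thumb halfword 0x4710 (0x4700 | (2 << 3)), which passes the
   (insn & 0xff80) == 0x4700 test above, and bits 3..6 recover the
   register number 2 whose contents give the real target. */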
2730 static struct arm_prologue_cache *
2731 arm_make_stub_cache (struct frame_info *this_frame)
2733 struct arm_prologue_cache *cache;
2735 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2736 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2738 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2743 /* Our frame ID for a stub frame is the current SP and LR. */
2746 arm_stub_this_id (struct frame_info *this_frame,
2748 struct frame_id *this_id)
2750 struct arm_prologue_cache *cache;
2752 if (*this_cache == NULL)
2753 *this_cache = arm_make_stub_cache (this_frame);
2754 cache = (struct arm_prologue_cache *) *this_cache;
2756 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2760 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2761 struct frame_info *this_frame,
2762 void **this_prologue_cache)
2764 CORE_ADDR addr_in_block;
2766 CORE_ADDR pc, start_addr;
2769 addr_in_block = get_frame_address_in_block (this_frame);
2770 pc = get_frame_pc (this_frame);
2771 if (in_plt_section (addr_in_block)
2772 /* We also use the stub unwinder if the target memory is unreadable
2773 to avoid having the prologue unwinder trying to read it. */
2774 || target_read_memory (pc, dummy, 4) != 0)
2777 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2778 && arm_skip_bx_reg (this_frame, pc) != 0)
2784 struct frame_unwind arm_stub_unwind = {
2786 default_frame_unwind_stop_reason,
2788 arm_prologue_prev_register,
2790 arm_stub_unwind_sniffer
2793 /* Put here the code to store, into CACHE->saved_regs, the addresses
2794 of the saved registers of frame described by THIS_FRAME. CACHE is
2797 static struct arm_prologue_cache *
2798 arm_m_exception_cache (struct frame_info *this_frame)
2800 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2801 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2802 struct arm_prologue_cache *cache;
2803 CORE_ADDR unwound_sp;
2806 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2807 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2809 unwound_sp = get_frame_register_unsigned (this_frame,
2812 /* The hardware saves eight 32-bit words, comprising xPSR,
2813 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2814 "B1.5.6 Exception entry behavior" in
2815 "ARMv7-M Architecture Reference Manual". */
2816 cache->saved_regs[0].addr = unwound_sp;
2817 cache->saved_regs[1].addr = unwound_sp + 4;
2818 cache->saved_regs[2].addr = unwound_sp + 8;
2819 cache->saved_regs[3].addr = unwound_sp + 12;
2820 cache->saved_regs[12].addr = unwound_sp + 16;
2821 cache->saved_regs[14].addr = unwound_sp + 20;
2822 cache->saved_regs[15].addr = unwound_sp + 24;
2823 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2825 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2826 aligner between the top of the 32-byte stack frame and the
2827 previous context's stack pointer. */
2828 cache->prev_sp = unwound_sp + 32;
2829 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2830 && (xpsr & (1 << 9)) != 0)
2831 cache->prev_sp += 4;
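/* Worked example with hypothetical values: if the exception stacked
   its frame at UNWOUND_SP == 0x20001000, the saved xPSR lives at
   0x2000101c; with bit 9 clear the caller's SP is 0x20001020, and with
   bit 9 set (stack realigned on entry) it is 0x20001024. */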
2836 /* Implementation of function hook 'this_id' in
2837 'struct frame_unwind'. */
2840 arm_m_exception_this_id (struct frame_info *this_frame,
2842 struct frame_id *this_id)
2844 struct arm_prologue_cache *cache;
2846 if (*this_cache == NULL)
2847 *this_cache = arm_m_exception_cache (this_frame);
2848 cache = (struct arm_prologue_cache *) *this_cache;
2850 /* Our frame ID for an M-profile exception frame is the current SP and PC. */
2851 *this_id = frame_id_build (cache->prev_sp,
2852 get_frame_pc (this_frame));
2855 /* Implementation of function hook 'prev_register' in
2856 'struct frame_unwind'. */
2858 static struct value *
2859 arm_m_exception_prev_register (struct frame_info *this_frame,
2863 struct arm_prologue_cache *cache;
2865 if (*this_cache == NULL)
2866 *this_cache = arm_m_exception_cache (this_frame);
2867 cache = (struct arm_prologue_cache *) *this_cache;
2869 /* The value was already reconstructed into PREV_SP. */
2870 if (prev_regnum == ARM_SP_REGNUM)
2871 return frame_unwind_got_constant (this_frame, prev_regnum,
2874 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2878 /* Implementation of function hook 'sniffer' in
2879 'struct frame_unwind'. */
2882 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2883 struct frame_info *this_frame,
2884 void **this_prologue_cache)
2886 CORE_ADDR this_pc = get_frame_pc (this_frame);
2888 /* No need to check is_m; this sniffer is only registered for
2889 M-profile architectures. */
2891 /* Exception frames return to one of these magic PCs. Other values
2892 are not defined as of v7-M. See details in "B1.5.8 Exception
2893 return behavior" in "ARMv7-M Architecture Reference Manual". */
2894 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2895 || this_pc == 0xfffffffd)
2901 /* Frame unwinder for M-profile exceptions. */
2903 struct frame_unwind arm_m_exception_unwind =
2906 default_frame_unwind_stop_reason,
2907 arm_m_exception_this_id,
2908 arm_m_exception_prev_register,
2910 arm_m_exception_unwind_sniffer
2914 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2916 struct arm_prologue_cache *cache;
2918 if (*this_cache == NULL)
2919 *this_cache = arm_make_prologue_cache (this_frame);
2920 cache = (struct arm_prologue_cache *) *this_cache;
2922 return cache->prev_sp - cache->framesize;
2925 struct frame_base arm_normal_base = {
2926 &arm_prologue_unwind,
2927 arm_normal_frame_base,
2928 arm_normal_frame_base,
2929 arm_normal_frame_base
2932 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2933 dummy frame. The frame ID's base needs to match the TOS value
2934 saved by save_dummy_frame_tos() and returned from
2935 arm_push_dummy_call, and the PC needs to match the dummy frame's
2938 static struct frame_id
2939 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2941 return frame_id_build (get_frame_register_unsigned (this_frame,
2943 get_frame_pc (this_frame));
2946 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2947 be used to construct the previous frame's ID, after looking up the
2948 containing function). */
2951 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2954 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2955 return arm_addr_bits_remove (gdbarch, pc);
2959 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2961 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2964 static struct value *
2965 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2968 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2970 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2975 /* The PC is normally copied from the return column, which
2976 describes saves of LR. However, that version may have an
2977 extra bit set to indicate Thumb state. The bit is not
2979 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2980 return frame_unwind_got_constant (this_frame, regnum,
2981 arm_addr_bits_remove (gdbarch, lr));
2984 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2985 cpsr = get_frame_register_unsigned (this_frame, regnum);
2986 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2987 if (IS_THUMB_ADDR (lr))
2991 return frame_unwind_got_constant (this_frame, regnum, cpsr);
2994 internal_error (__FILE__, __LINE__,
2995 _("Unexpected register %d"), regnum);
3000 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3001 struct dwarf2_frame_state_reg *reg,
3002 struct frame_info *this_frame)
3008 reg->how = DWARF2_FRAME_REG_FN;
3009 reg->loc.fn = arm_dwarf2_prev_register;
3012 reg->how = DWARF2_FRAME_REG_CFA;
3017 /* Implement the stack_frame_destroyed_p gdbarch method. */
3020 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3022 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3023 unsigned int insn, insn2;
3024 int found_return = 0, found_stack_adjust = 0;
3025 CORE_ADDR func_start, func_end;
3029 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3032 /* The epilogue is a sequence of instructions along the following lines:
3034 - add stack frame size to SP or FP
3035 - [if frame pointer used] restore SP from FP
3036 - restore registers from SP [may include PC]
3037 - a return-type instruction [if PC wasn't already restored]
3039 In a first pass, we scan forward from the current PC and verify the
3040 instructions we find as compatible with this sequence, ending in a
3043 However, this is not sufficient to distinguish indirect function calls
3044 within a function from indirect tail calls in the epilogue in some cases.
3045 Therefore, if we didn't already find any SP-changing instruction during
3046 forward scan, we add a backward scanning heuristic to ensure we actually
3047 are in the epilogue. */
3050 while (scan_pc < func_end && !found_return)
3052 if (target_read_memory (scan_pc, buf, 2))
3056 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3058 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3060 else if (insn == 0x46f7) /* mov pc, lr */
3062 else if (thumb_instruction_restores_sp (insn))
3064 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3067 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3069 if (target_read_memory (scan_pc, buf, 2))
3073 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3075 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3077 if (insn2 & 0x8000) /* <registers> include PC. */
3080 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3081 && (insn2 & 0x0fff) == 0x0b04)
3083 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3086 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3087 && (insn2 & 0x0e00) == 0x0a00)
3099 /* Since any instruction in the epilogue sequence, with the possible
3100 exception of return itself, updates the stack pointer, we need to
3101 scan backwards for at most one instruction. Try either a 16-bit or
3102 a 32-bit instruction. This is just a heuristic, so we do not worry
3103 too much about false positives. */
3105 if (pc - 4 < func_start)
3107 if (target_read_memory (pc - 4, buf, 4))
3110 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3111 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3113 if (thumb_instruction_restores_sp (insn2))
3114 found_stack_adjust = 1;
3115 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3116 found_stack_adjust = 1;
3117 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3118 && (insn2 & 0x0fff) == 0x0b04)
3119 found_stack_adjust = 1;
3120 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3121 && (insn2 & 0x0e00) == 0x0a00)
3122 found_stack_adjust = 1;
3124 return found_stack_adjust;
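/* Worked example (illustrative): a typical Thumb epilogue ends in
   "pop {r4, pc}", which assembles to 0xbd10. The forward scan above
   treats it both as an SP-restoring instruction and, because
   (insn & 0xff00) == 0xbd00, as the return itself. */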
3127 /* Implement the stack_frame_destroyed_p gdbarch method. */
3130 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3132 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3135 CORE_ADDR func_start, func_end;
3137 if (arm_pc_is_thumb (gdbarch, pc))
3138 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3140 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3143 /* We are in the epilogue if the previous instruction was a stack
3144 adjustment and the next instruction is a possible return (bx, mov
3145 pc, or pop). We could have to scan backwards to find the stack
3146 adjustment, or forwards to find the return, but this is a decent
3147 approximation. First scan forwards. */
3150 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3151 if (bits (insn, 28, 31) != INST_NV)
3153 if ((insn & 0x0ffffff0) == 0x012fff10)
3156 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3159 else if ((insn & 0x0fff0000) == 0x08bd0000
3160 && (insn & 0x0000c000) != 0)
3161 /* POP (LDMIA), including PC or LR. */
3168 /* Scan backwards. This is just a heuristic, so do not worry about
3169 false positives from mode changes. */
3171 if (pc < func_start + 4)
3174 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3175 if (arm_instruction_restores_sp (insn))
3182 /* When arguments must be pushed onto the stack, they go on in reverse
3183 order. The code below implements a FILO (stack) to do this. */
3188 struct stack_item *prev;
3192 static struct stack_item *
3193 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3195 struct stack_item *si;
3196 si = XNEW (struct stack_item);
3197 si->data = (gdb_byte *) xmalloc (len);
3200 memcpy (si->data, contents, len);
3204 static struct stack_item *
3205 pop_stack_item (struct stack_item *si)
3207 struct stack_item *dead = si;
3215 /* Return the alignment (in bytes) of the given type. */
3218 arm_type_align (struct type *t)
3224 t = check_typedef (t);
3225 switch (TYPE_CODE (t))
3228 /* Should never happen. */
3229 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3233 case TYPE_CODE_ENUM:
3237 case TYPE_CODE_RANGE:
3239 case TYPE_CODE_CHAR:
3240 case TYPE_CODE_BOOL:
3241 return TYPE_LENGTH (t);
3243 case TYPE_CODE_ARRAY:
3244 if (TYPE_VECTOR (t))
3246 /* Use the natural alignment for vector types (the same as for
3247 scalar types), but the maximum alignment is 64-bit. */
3248 if (TYPE_LENGTH (t) > 8)
3251 return TYPE_LENGTH (t);
3254 return arm_type_align (TYPE_TARGET_TYPE (t));
3255 case TYPE_CODE_COMPLEX:
3256 return arm_type_align (TYPE_TARGET_TYPE (t));
3258 case TYPE_CODE_STRUCT:
3259 case TYPE_CODE_UNION:
3261 for (n = 0; n < TYPE_NFIELDS (t); n++)
3263 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3271 /* Possible base types for a candidate for passing and returning in
3274 enum arm_vfp_cprc_base_type
3283 /* The length of one element of base type B. */
3286 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3290 case VFP_CPRC_SINGLE:
3292 case VFP_CPRC_DOUBLE:
3294 case VFP_CPRC_VEC64:
3296 case VFP_CPRC_VEC128:
3299 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3304 /* The character ('s', 'd' or 'q') for the type of VFP register used
3305 for passing base type B. */
3308 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3312 case VFP_CPRC_SINGLE:
3314 case VFP_CPRC_DOUBLE:
3316 case VFP_CPRC_VEC64:
3318 case VFP_CPRC_VEC128:
3321 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3326 /* Determine whether T may be part of a candidate for passing and
3327 returning in VFP registers, ignoring the limit on the total number
3328 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3329 classification of the first valid component found; if it is not
3330 VFP_CPRC_UNKNOWN, all components must have the same classification
3331 as *BASE_TYPE. If it is found that T contains a type not permitted
3332 for passing and returning in VFP registers, a type differently
3333 classified from *BASE_TYPE, or two types differently classified
3334 from each other, return -1, otherwise return the total number of
3335 base-type elements found (possibly 0 in an empty structure or
3336 array). Vector types are not currently supported, matching the
3337 generic AAPCS support. */
3340 arm_vfp_cprc_sub_candidate (struct type *t,
3341 enum arm_vfp_cprc_base_type *base_type)
3343 t = check_typedef (t);
3344 switch (TYPE_CODE (t))
3347 switch (TYPE_LENGTH (t))
3350 if (*base_type == VFP_CPRC_UNKNOWN)
3351 *base_type = VFP_CPRC_SINGLE;
3352 else if (*base_type != VFP_CPRC_SINGLE)
3357 if (*base_type == VFP_CPRC_UNKNOWN)
3358 *base_type = VFP_CPRC_DOUBLE;
3359 else if (*base_type != VFP_CPRC_DOUBLE)
3368 case TYPE_CODE_COMPLEX:
3369 /* Arguments of complex T where T is one of the types float or
3370 double get treated as if they are implemented as:
3379 switch (TYPE_LENGTH (t))
3382 if (*base_type == VFP_CPRC_UNKNOWN)
3383 *base_type = VFP_CPRC_SINGLE;
3384 else if (*base_type != VFP_CPRC_SINGLE)
3389 if (*base_type == VFP_CPRC_UNKNOWN)
3390 *base_type = VFP_CPRC_DOUBLE;
3391 else if (*base_type != VFP_CPRC_DOUBLE)
3400 case TYPE_CODE_ARRAY:
3402 if (TYPE_VECTOR (t))
3404 /* 64-bit and 128-bit containerized vector types are VFP
3406 switch (TYPE_LENGTH (t))
3409 if (*base_type == VFP_CPRC_UNKNOWN)
3410 *base_type = VFP_CPRC_VEC64;
3413 if (*base_type == VFP_CPRC_UNKNOWN)
3414 *base_type = VFP_CPRC_VEC128;
3425 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3429 if (TYPE_LENGTH (t) == 0)
3431 gdb_assert (count == 0);
3434 else if (count == 0)
3436 unitlen = arm_vfp_cprc_unit_length (*base_type);
3437 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3438 return TYPE_LENGTH (t) / unitlen;
3443 case TYPE_CODE_STRUCT:
3448 for (i = 0; i < TYPE_NFIELDS (t); i++)
3450 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3452 if (sub_count == -1)
3456 if (TYPE_LENGTH (t) == 0)
3458 gdb_assert (count == 0);
3461 else if (count == 0)
3463 unitlen = arm_vfp_cprc_unit_length (*base_type);
3464 if (TYPE_LENGTH (t) != unitlen * count)
3469 case TYPE_CODE_UNION:
3474 for (i = 0; i < TYPE_NFIELDS (t); i++)
3476 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3478 if (sub_count == -1)
3480 count = (count > sub_count ? count : sub_count);
3482 if (TYPE_LENGTH (t) == 0)
3484 gdb_assert (count == 0);
3487 else if (count == 0)
3489 unitlen = arm_vfp_cprc_unit_length (*base_type);
3490 if (TYPE_LENGTH (t) != unitlen * count)
3502 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3503 if passed to or returned from a non-variadic function with the VFP
3504 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3505 *BASE_TYPE to the base type for T and *COUNT to the number of
3506 elements of that base type before returning. */
3509 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3512 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3513 int c = arm_vfp_cprc_sub_candidate (t, &b);
3514 if (c <= 0 || c > 4)
3521 /* Return 1 if the VFP ABI should be used for passing arguments to and
3522 returning values from a function of type FUNC_TYPE, 0
3526 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3528 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3529 /* Variadic functions always use the base ABI. Assume that functions
3530 without debug info are not variadic. */
3531 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3533 /* The VFP ABI is only supported as a variant of AAPCS. */
3534 if (tdep->arm_abi != ARM_ABI_AAPCS)
3536 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
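/* Illustrative examples (source-level types assumed for exposition) of
   the classification done by arm_vfp_call_candidate above:

     struct { float x, y; }      -- base VFP_CPRC_SINGLE, count 2: a CPRC.
     struct { double d[4]; }     -- base VFP_CPRC_DOUBLE, count 4: a CPRC.
     struct { float f[5]; }      -- count 5 exceeds 4: not a CPRC.
     struct { float f; int i; }  -- mixed member types: not a CPRC. */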
3539 /* We currently only support passing parameters in integer registers, which
3540 conforms with GCC's default model, and VFP argument passing following
3541 the VFP variant of AAPCS. Several other variants exist and
3542 we should probably support some of them based on the selected ABI. */
3545 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3546 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3547 struct value **args, CORE_ADDR sp, int struct_return,
3548 CORE_ADDR struct_addr)
3550 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3554 struct stack_item *si = NULL;
3557 unsigned vfp_regs_free = (1 << 16) - 1;
3559 /* Determine the type of this function and whether the VFP ABI
3561 ftype = check_typedef (value_type (function));
3562 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3563 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3564 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3566 /* Set the return address. For the ARM, the return breakpoint is
3567 always at BP_ADDR. */
3568 if (arm_pc_is_thumb (gdbarch, bp_addr))
3570 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3572 /* Walk through the list of args and determine how large a temporary
3573 stack is required. Need to take care here as structs may be
3574 passed on the stack, and we have to push them. */
3577 argreg = ARM_A1_REGNUM;
3580 /* The struct_return pointer occupies the first parameter
3581 passing register. */
3585 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3586 gdbarch_register_name (gdbarch, argreg),
3587 paddress (gdbarch, struct_addr));
3588 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3592 for (argnum = 0; argnum < nargs; argnum++)
3595 struct type *arg_type;
3596 struct type *target_type;
3597 enum type_code typecode;
3598 const bfd_byte *val;
3600 enum arm_vfp_cprc_base_type vfp_base_type;
3602 int may_use_core_reg = 1;
3604 arg_type = check_typedef (value_type (args[argnum]));
3605 len = TYPE_LENGTH (arg_type);
3606 target_type = TYPE_TARGET_TYPE (arg_type);
3607 typecode = TYPE_CODE (arg_type);
3608 val = value_contents (args[argnum]);
3610 align = arm_type_align (arg_type);
3611 /* Round alignment up to a whole number of words. */
3612 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3613 /* Different ABIs have different maximum alignments. */
3614 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3616 /* The APCS ABI only requires word alignment. */
3617 align = INT_REGISTER_SIZE;
3621 /* The AAPCS requires at most doubleword alignment. */
3622 if (align > INT_REGISTER_SIZE * 2)
3623 align = INT_REGISTER_SIZE * 2;
3627 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3635 /* Because this is a CPRC it cannot go in a core register or
3636 cause a core register to be skipped for alignment.
3637 Either it goes in VFP registers and the rest of this loop
3638 iteration is skipped for this argument, or it goes on the
3639 stack (and the stack alignment code is correct for this
3641 may_use_core_reg = 0;
3643 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3644 shift = unit_length / 4;
3645 mask = (1 << (shift * vfp_base_count)) - 1;
3646 for (regno = 0; regno < 16; regno += shift)
3647 if (((vfp_regs_free >> regno) & mask) == mask)
3656 vfp_regs_free &= ~(mask << regno);
3657 reg_scaled = regno / shift;
3658 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3659 for (i = 0; i < vfp_base_count; i++)
3663 if (reg_char == 'q')
3664 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3665 val + i * unit_length);
3668 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3669 reg_char, reg_scaled + i);
3670 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3672 regcache_cooked_write (regcache, regnum,
3673 val + i * unit_length);
3680 /* This CPRC could not go in VFP registers, so all VFP
3681 registers are now marked as used. */
3686 /* Push stack padding for doubleword alignment. */
3687 if (nstack & (align - 1))
3689 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3690 nstack += INT_REGISTER_SIZE;
3693 /* Doubleword aligned quantities must go in even register pairs. */
3694 if (may_use_core_reg
3695 && argreg <= ARM_LAST_ARG_REGNUM
3696 && align > INT_REGISTER_SIZE
3700 /* If the argument is a pointer to a function, and it is a
3701 Thumb function, create a LOCAL copy of the value and set
3702 the THUMB bit in it. */
3703 if (TYPE_CODE_PTR == typecode
3704 && target_type != NULL
3705 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3707 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3708 if (arm_pc_is_thumb (gdbarch, regval))
3710 bfd_byte *copy = (bfd_byte *) alloca (len);
3711 store_unsigned_integer (copy, len, byte_order,
3712 MAKE_THUMB_ADDR (regval));
3717 /* Copy the argument to general registers or the stack in
3718 register-sized pieces. Large arguments are split between
3719 registers and stack. */
3722 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3724 = extract_unsigned_integer (val, partial_len, byte_order);
3726 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3728 /* The argument is being passed in a general purpose
3730 if (byte_order == BFD_ENDIAN_BIG)
3731 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3733 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3735 gdbarch_register_name
3737 phex (regval, INT_REGISTER_SIZE));
3738 regcache_cooked_write_unsigned (regcache, argreg, regval);
3743 gdb_byte buf[INT_REGISTER_SIZE];
3745 memset (buf, 0, sizeof (buf));
3746 store_unsigned_integer (buf, partial_len, byte_order, regval);
3748 /* Push the arguments onto the stack. */
3750 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3752 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3753 nstack += INT_REGISTER_SIZE;
3760 /* If we have an odd number of words to push, then decrement the stack
3761 by one word now, so the first stack argument will be doubleword aligned.
3768 write_memory (sp, si->data, si->len);
3769 si = pop_stack_item (si);
3772 /* Finally, update the SP register. */
3773 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
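/* Worked example (hypothetical callee): for "int f (double d, int i)",
   the core-register path above places D in the doubleword-aligned pair
   r0/r1 and I in r2, while the VFP path (taken when
   arm_vfp_abi_for_function returns true) places D in d0 and I in r0. */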
3779 /* Always align the frame to an 8-byte boundary. This is required on
3780 some platforms and harmless on the rest. */
3783 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3785 /* Align the stack to eight bytes. */
3786 return sp & ~ (CORE_ADDR) 7;
3790 print_fpu_flags (struct ui_file *file, int flags)
3792 if (flags & (1 << 0))
3793 fputs_filtered ("IVO ", file);
3794 if (flags & (1 << 1))
3795 fputs_filtered ("DVZ ", file);
3796 if (flags & (1 << 2))
3797 fputs_filtered ("OFL ", file);
3798 if (flags & (1 << 3))
3799 fputs_filtered ("UFL ", file);
3800 if (flags & (1 << 4))
3801 fputs_filtered ("INX ", file);
3802 fputc_filtered ('\n', file);
3805 /* Print interesting information about the floating point processor
3806 (if present) or emulator. */
3808 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3809 struct frame_info *frame, const char *args)
3811 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3814 type = (status >> 24) & 127;
3815 if (status & (1 << 31))
3816 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3818 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3819 /* i18n: [floating point unit] mask */
3820 fputs_filtered (_("mask: "), file);
3821 print_fpu_flags (file, status >> 16);
3822 /* i18n: [floating point unit] flags */
3823 fputs_filtered (_("flags: "), file);
3824 print_fpu_flags (file, status);
3827 /* Construct the ARM extended floating point type. */
3828 static struct type *
3829 arm_ext_type (struct gdbarch *gdbarch)
3831 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3833 if (!tdep->arm_ext_type)
3835 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3836 floatformats_arm_ext);
3838 return tdep->arm_ext_type;
3841 static struct type *
3842 arm_neon_double_type (struct gdbarch *gdbarch)
3844 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3846 if (tdep->neon_double_type == NULL)
3848 struct type *t, *elem;
3850 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3852 elem = builtin_type (gdbarch)->builtin_uint8;
3853 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3854 elem = builtin_type (gdbarch)->builtin_uint16;
3855 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3856 elem = builtin_type (gdbarch)->builtin_uint32;
3857 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3858 elem = builtin_type (gdbarch)->builtin_uint64;
3859 append_composite_type_field (t, "u64", elem);
3860 elem = builtin_type (gdbarch)->builtin_float;
3861 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3862 elem = builtin_type (gdbarch)->builtin_double;
3863 append_composite_type_field (t, "f64", elem);
3865 TYPE_VECTOR (t) = 1;
3866 TYPE_NAME (t) = "neon_d";
3867 tdep->neon_double_type = t;
3870 return tdep->neon_double_type;
3873 /* FIXME: The vector types are not correctly ordered on big-endian
3874 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3875 bits of d0 - regardless of what unit size is being held in d0. So
3876 the offset of the first uint8 in d0 is 7, but the offset of the
3877 first float is 4. This code works as-is for little-endian
3880 static struct type *
3881 arm_neon_quad_type (struct gdbarch *gdbarch)
3883 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3885 if (tdep->neon_quad_type == NULL)
3887 struct type *t, *elem;
3889 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3891 elem = builtin_type (gdbarch)->builtin_uint8;
3892 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3893 elem = builtin_type (gdbarch)->builtin_uint16;
3894 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3895 elem = builtin_type (gdbarch)->builtin_uint32;
3896 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3897 elem = builtin_type (gdbarch)->builtin_uint64;
3898 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3899 elem = builtin_type (gdbarch)->builtin_float;
3900 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3901 elem = builtin_type (gdbarch)->builtin_double;
3902 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3904 TYPE_VECTOR (t) = 1;
3905 TYPE_NAME (t) = "neon_q";
3906 tdep->neon_quad_type = t;
3909 return tdep->neon_quad_type;
3912 /* Return the GDB type object for the "standard" data type of data in
3915 static struct type *
3916 arm_register_type (struct gdbarch *gdbarch, int regnum)
3918 int num_regs = gdbarch_num_regs (gdbarch);
3920 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3921 && regnum >= num_regs && regnum < num_regs + 32)
3922 return builtin_type (gdbarch)->builtin_float;
3924 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3925 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3926 return arm_neon_quad_type (gdbarch);
3928 /* If the target description has register information, we are only
3929 in this function so that we can override the types of
3930 double-precision registers for NEON. */
3931 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3933 struct type *t = tdesc_register_type (gdbarch, regnum);
3935 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3936 && TYPE_CODE (t) == TYPE_CODE_FLT
3937 && gdbarch_tdep (gdbarch)->have_neon)
3938 return arm_neon_double_type (gdbarch);
3943 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3945 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3946 return builtin_type (gdbarch)->builtin_void;
3948 return arm_ext_type (gdbarch);
3950 else if (regnum == ARM_SP_REGNUM)
3951 return builtin_type (gdbarch)->builtin_data_ptr;
3952 else if (regnum == ARM_PC_REGNUM)
3953 return builtin_type (gdbarch)->builtin_func_ptr;
3954 else if (regnum >= ARRAY_SIZE (arm_register_names))
3955 /* These registers are only supported on targets which supply
3956 an XML description. */
3957 return builtin_type (gdbarch)->builtin_int0;
3959 return builtin_type (gdbarch)->builtin_uint32;
3962 /* Map a DWARF register REGNUM onto the appropriate GDB register
3966 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3968 /* Core integer regs. */
3969 if (reg >= 0 && reg <= 15)
3972 /* Legacy FPA encoding. These were once used in a way which
3973 overlapped with VFP register numbering, so their use is
3974 discouraged, but GDB doesn't support the ARM toolchain
3975 which used them for VFP. */
3976 if (reg >= 16 && reg <= 23)
3977 return ARM_F0_REGNUM + reg - 16;
3979 /* New assignments for the FPA registers. */
3980 if (reg >= 96 && reg <= 103)
3981 return ARM_F0_REGNUM + reg - 96;
3983 /* WMMX register assignments. */
3984 if (reg >= 104 && reg <= 111)
3985 return ARM_WCGR0_REGNUM + reg - 104;
3987 if (reg >= 112 && reg <= 127)
3988 return ARM_WR0_REGNUM + reg - 112;
3990 if (reg >= 192 && reg <= 199)
3991 return ARM_WC0_REGNUM + reg - 192;
3993 /* VFP v2 registers. A double precision value is actually
3994 in d1 rather than s2, but the ABI only defines numbering
3995 for the single precision registers. This will "just work"
3996 in GDB for little endian targets (we'll read eight bytes,
3997 starting in s0 and then progressing to s1), but will be
3998 reversed on big endian targets with VFP. This won't
3999 be a problem for the new Neon quad registers; you're supposed
4000 to use DW_OP_piece for those. */
4001 if (reg >= 64 && reg <= 95)
4005 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4006 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4010 /* VFP v3 / Neon registers. This range is also used for VFP v2
4011 registers, except that it now describes d0 instead of s0. */
4012 if (reg >= 256 && reg <= 287)
4016 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4017 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4024 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4026 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4029 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4031 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4032 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4034 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4035 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4037 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4038 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4040 if (reg < NUM_GREGS)
4041 return SIM_ARM_R0_REGNUM + reg;
4044 if (reg < NUM_FREGS)
4045 return SIM_ARM_FP0_REGNUM + reg;
4048 if (reg < NUM_SREGS)
4049 return SIM_ARM_FPS_REGNUM + reg;
4052 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4055 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4056 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4057 It is thought that this is the floating-point register format on
4058 little-endian systems. */
4061 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4062 void *dbl, int endianess)
4066 if (endianess == BFD_ENDIAN_BIG)
4067 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4069 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4071 floatformat_from_doublest (fmt, &d, dbl);
4075 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4080 floatformat_to_doublest (fmt, ptr, &d);
4081 if (endianess == BFD_ENDIAN_BIG)
4082 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4084 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4088 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4089 of the appropriate mode (as encoded in the PC value), even if this
4090 differs from what would be expected according to the symbol tables. */
4093 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4094 struct address_space *aspace,
4097 struct cleanup *old_chain
4098 = make_cleanup_restore_integer (&arm_override_mode);
4100 arm_override_mode = IS_THUMB_ADDR (pc);
4101 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4103 insert_single_step_breakpoint (gdbarch, aspace, pc);
4105 do_cleanups (old_chain);
4108 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4109 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4110 NULL if an error occurs. BUF is freed. */
4113 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4114 int old_len, int new_len)
4117 int bytes_to_read = new_len - old_len;
4119 new_buf = (gdb_byte *) xmalloc (new_len);
4120 memcpy (new_buf + bytes_to_read, buf, old_len);
4122 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4130 /* An IT block is at most the 2-byte IT instruction followed by
4131 four 4-byte instructions. The furthest back we must search to
4132 find an IT block that affects the current instruction is thus
4133 2 + 3 * 4 == 14 bytes. */
4134 #define MAX_IT_BLOCK_PREFIX 14
4136 /* Use a quick scan if there are more than this many bytes of
4138 #define IT_SCAN_THRESHOLD 32
4140 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4141 A breakpoint in an IT block may not be hit, depending on the
4144 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4148 CORE_ADDR boundary, func_start;
4150 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4151 int i, any, last_it, last_it_count;
4153 /* If we are using BKPT breakpoints, none of this is necessary. */
4154 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4157 /* ARM mode does not have this problem. */
4158 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4161 /* We are setting a breakpoint in Thumb code that could potentially
4162 contain an IT block. The first step is to find how much Thumb
4163 code there is; we do not need to read outside of known Thumb
4165 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4167 /* Thumb-2 code must have mapping symbols to have a chance. */
4170 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4172 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4173 && func_start > boundary)
4174 boundary = func_start;
4176 /* Search for a candidate IT instruction. We have to do some fancy
4177 footwork to distinguish a real IT instruction from the second
4178 half of a 32-bit instruction, but there is no need for that if
4179 there's no candidate. */
4180 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4182 /* No room for an IT instruction. */
4185 buf = (gdb_byte *) xmalloc (buf_len);
4186 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4189 for (i = 0; i < buf_len; i += 2)
4191 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4192 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4205 /* OK, the code bytes before this instruction contain at least one
4206 halfword which resembles an IT instruction. We know that it's
4207 Thumb code, but there are still two possibilities. Either the
4208 halfword really is an IT instruction, or it is the second half of
4209 a 32-bit Thumb instruction. The only way we can tell is to
4210 scan forwards from a known instruction boundary. */
4211 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4215 /* There's a lot of code before this instruction. Start with an
4216 optimistic search; it's easy to recognize halfwords that can
4217 not be the start of a 32-bit instruction, and use that to
4218 lock on to the instruction boundaries. */
4219 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4222 buf_len = IT_SCAN_THRESHOLD;
4225 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4227 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4228 if (thumb_insn_size (inst1) == 2)
4235 /* At this point, if DEFINITE, BUF[I] is the first place we
4236 are sure that we know the instruction boundaries, and it is far
4237 enough from BPADDR that we could not miss an IT instruction
4238 affecting BPADDR. If ! DEFINITE, give up - start from a
4242 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4246 buf_len = bpaddr - boundary;
4252 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4255 buf_len = bpaddr - boundary;
4259 /* Scan forwards. Find the last IT instruction before BPADDR. */
4264 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4266 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4271 else if (inst1 & 0x0002)
4273 else if (inst1 & 0x0004)
4278 i += thumb_insn_size (inst1);
4284 /* There wasn't really an IT instruction after all. */
4287 if (last_it_count < 1)
4288 /* It was too far away. */
4291 /* This really is a trouble spot. Move the breakpoint to the IT
4293 return bpaddr - buf_len + last_it;
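/* Worked example (hypothetical code): given

       it    eq          @ halfword 0xbf08
       addeq r0, r0, #1  @ conditional, inside the IT block

   a breakpoint requested on the "addeq" might not be hit (its
   condition may be false), so the scan above moves it back onto the
   "it" itself; 0xbf08 satisfies the
   (inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0 test. */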
4296 /* ARM displaced stepping support.
4298 Generally ARM displaced stepping works as follows:
4300 1. When an instruction is to be single-stepped, it is first decoded by
4301 arm_process_displaced_insn. Depending on the type of instruction, it is
4302 then copied to a scratch location, possibly in a modified form. The
4303 copy_* set of functions performs such modification, as necessary. A
4304 breakpoint is placed after the modified instruction in the scratch space
4305 to return control to GDB. Note in particular that instructions which
4306 modify the PC will no longer do so after modification.
4308 2. The instruction is single-stepped, by setting the PC to the scratch
4309 location address, and resuming. Control returns to GDB when the
4312 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4313 function used for the current instruction. This function's job is to
4314 put the CPU/memory state back to what it would have been if the
4315 instruction had been executed unmodified in its original location. */
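/* As a concrete example (illustrative, not tied to any particular program):
   "add r2, pc, #4" at 0x8000 is copied to the scratch space roughly as
   "add r0, r1, #4", with r1 preset to 0x8008 (the value the PC reads as at
   the original location) and r0 standing in for r2. After the step, the
   cleanup copies r0 into r2 and restores r0/r1, so the visible effect is
   the same as executing the original instruction in place. */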
4317 /* NOP instruction (mov r0, r0). */
4318 #define ARM_NOP 0xe1a00000
4319 #define THUMB_NOP 0x4600
4321 /* Helper for register reads for displaced stepping. In particular, this
4322 returns the PC as it would be seen by the instruction at its original
4326 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4330 CORE_ADDR from = dsc->insn_addr;
4332 if (regno == ARM_PC_REGNUM)
4334 /* Compute pipeline offset:
4335 - When executing an ARM instruction, PC reads as the address of the
4336 current instruction plus 8.
4337 - When executing a Thumb instruction, PC reads as the address of the
4338 current instruction plus 4. */
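/* So, for example, an ARM instruction at 0x8000 reads the PC as 0x8008,
   while a Thumb instruction at the same address reads it as 0x8004; that
   adjusted value, not the raw program counter, is what gets returned. */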
4345 if (debug_displaced)
4346 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4347 (unsigned long) from);
4348 return (ULONGEST) from;
4352 regcache_cooked_read_unsigned (regs, regno, &ret);
4353 if (debug_displaced)
4354 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4355 regno, (unsigned long) ret);
4361 displaced_in_arm_mode (struct regcache *regs)
4364 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4366 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4368 return (ps & t_bit) == 0;
4371 /* Write to the PC as from a branch instruction. */
4374 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4378 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4379 architecture versions < 6. */
4380 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4381 val & ~(ULONGEST) 0x3);
4383 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4384 val & ~(ULONGEST) 0x1);
4387 /* Write to the PC as from a branch-exchange instruction. */
4390 bx_write_pc (struct regcache *regs, ULONGEST val)
4393 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4395 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4399 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4400 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4402 else if ((val & 2) == 0)
4404 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4405 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4409 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4410 mode, align dest to 4 bytes). */
4411 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4412 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4413 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4417 /* Write to the PC as if from a load instruction. */
4420 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4423 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4424 bx_write_pc (regs, val);
4426 branch_write_pc (regs, dsc, val);
4429 /* Write to the PC as if from an ALU instruction. */
4432 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4435 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4436 bx_write_pc (regs, val);
4438 branch_write_pc (regs, dsc, val);
4441 /* Helper for writing to registers for displaced stepping. Writing to the PC
4442 has varying effects depending on the instruction which does the write:
4443 this is controlled by the WRITE_PC argument. */
4446 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4447 int regno, ULONGEST val, enum pc_write_style write_pc)
4449 if (regno == ARM_PC_REGNUM)
4451 if (debug_displaced)
4452 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4453 (unsigned long) val);
4456 case BRANCH_WRITE_PC:
4457 branch_write_pc (regs, dsc, val);
4461 bx_write_pc (regs, val);
4465 load_write_pc (regs, dsc, val);
4469 alu_write_pc (regs, dsc, val);
4472 case CANNOT_WRITE_PC:
4473 warning (_("Instruction wrote to PC in an unexpected way when "
4474 "single-stepping"));
4478 internal_error (__FILE__, __LINE__,
4479 _("Invalid argument to displaced_write_reg"));
4482 dsc->wrote_to_pc = 1;
4486 if (debug_displaced)
4487 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4488 regno, (unsigned long) val);
4489 regcache_cooked_write_unsigned (regs, regno, val);
4493 /* This function is used to concisely determine if an instruction INSN
4494 references PC. Register fields of interest in INSN should have the
4495 corresponding fields of BITMASK set to 0b1111. The function
4496 returns 1 if any of these fields in INSN reference the PC
4497 (also 0b1111, r15), else it returns 0. */
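/* For example, a BITMASK of 0x000ff000 checks the Rd (bits 12-15) and Rn
   (bits 16-19) fields of an ARM data-processing instruction; the function
   returns 1 iff either nibble is 0xf. */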
4500 insn_references_pc (uint32_t insn, uint32_t bitmask)
4502 uint32_t lowbit = 1;
4504 while (bitmask != 0)
4508 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4514 mask = lowbit * 0xf;
4516 if ((insn & mask) == mask)
4525 /* The simplest copy function. Many instructions have the same effect no
4526 matter what address they are executed at: in those cases, use this. */
4529 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4530 const char *iname, struct displaced_step_closure *dsc)
4532 if (debug_displaced)
4533 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4534 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4537 dsc->modinsn[0] = insn;
4543 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4544 uint16_t insn2, const char *iname,
4545 struct displaced_step_closure *dsc)
4547 if (debug_displaced)
4548 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4549 "opcode/class '%s' unmodified\n", insn1, insn2,
4552 dsc->modinsn[0] = insn1;
4553 dsc->modinsn[1] = insn2;
4559 /* Copy a 16-bit Thumb instruction (Thumb or 16-bit Thumb-2) without any
4562 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4564 struct displaced_step_closure *dsc)
4566 if (debug_displaced)
4567 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4568 "opcode/class '%s' unmodified\n", insn,
4571 dsc->modinsn[0] = insn;
4576 /* Preload instructions with immediate offset. */
4579 cleanup_preload (struct gdbarch *gdbarch,
4580 struct regcache *regs, struct displaced_step_closure *dsc)
4582 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4583 if (!dsc->u.preload.immed)
4584 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4588 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4589 struct displaced_step_closure *dsc, unsigned int rn)
4592 /* Preload instructions:
4594 {pli/pld} [rn, #+/-imm]
4596 {pli/pld} [r0, #+/-imm]. */
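/* For instance (illustrative), "pld [pc, #16]" runs out of line as
   "pld [r0, #16]", with r0 temporarily holding the value the PC would have
   read as at the original location; cleanup_preload then restores r0. */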
4598 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4599 rn_val = displaced_read_reg (regs, dsc, rn);
4600 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4601 dsc->u.preload.immed = 1;
4603 dsc->cleanup = &cleanup_preload;
4607 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4608 struct displaced_step_closure *dsc)
4610 unsigned int rn = bits (insn, 16, 19);
4612 if (!insn_references_pc (insn, 0x000f0000ul))
4613 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4615 if (debug_displaced)
4616 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4617 (unsigned long) insn);
4619 dsc->modinsn[0] = insn & 0xfff0ffff;
4621 install_preload (gdbarch, regs, dsc, rn);
4627 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4628 struct regcache *regs, struct displaced_step_closure *dsc)
4630 unsigned int rn = bits (insn1, 0, 3);
4631 unsigned int u_bit = bit (insn1, 7);
4632 int imm12 = bits (insn2, 0, 11);
4635 if (rn != ARM_PC_REGNUM)
4636 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4638 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3
4639 and PLD (literal) Encoding T1. */
4640 if (debug_displaced)
4641 fprintf_unfiltered (gdb_stdlog,
4642 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4643 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4649 /* Rewrite instruction {pli/pld} PC imm12 into:
4650 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4654 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4656 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4657 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4659 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4661 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4662 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4663 dsc->u.preload.immed = 0;
4665 /* {pli/pld} [r0, r1] */
4666 dsc->modinsn[0] = insn1 & 0xfff0;
4667 dsc->modinsn[1] = 0xf001;
4670 dsc->cleanup = &cleanup_preload;
4674 /* Preload instructions with register offset. */
4677 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4678 struct displaced_step_closure *dsc, unsigned int rn,
4681 ULONGEST rn_val, rm_val;
4683 /* Preload register-offset instructions:
4685 {pli/pld} [rn, rm {, shift}]
4687 {pli/pld} [r0, r1 {, shift}]. */
4689 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4690 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4691 rn_val = displaced_read_reg (regs, dsc, rn);
4692 rm_val = displaced_read_reg (regs, dsc, rm);
4693 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4694 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4695 dsc->u.preload.immed = 0;
4697 dsc->cleanup = &cleanup_preload;
4701 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4702 struct regcache *regs,
4703 struct displaced_step_closure *dsc)
4705 unsigned int rn = bits (insn, 16, 19);
4706 unsigned int rm = bits (insn, 0, 3);
4709 if (!insn_references_pc (insn, 0x000f000ful))
4710 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4712 if (debug_displaced)
4713 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4714 (unsigned long) insn);
4716 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4718 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4722 /* Copy/cleanup coprocessor load and store instructions. */
4725 cleanup_copro_load_store (struct gdbarch *gdbarch,
4726 struct regcache *regs,
4727 struct displaced_step_closure *dsc)
4729 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4731 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4733 if (dsc->u.ldst.writeback)
4734 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4738 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4739 struct displaced_step_closure *dsc,
4740 int writeback, unsigned int rn)
4744 /* Coprocessor load/store instructions:
4746 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4748 {stc/stc2} [r0, #+/-imm].
4750 ldc/ldc2 are handled identically. */
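/* For instance (illustrative), "vldr d0, [pc, #16]" runs out of line as
   "vldr d0, [r0, #16]", with r0 preset to the word-aligned PC value of the
   original location; the cleanup restores r0 and performs any requested
   base writeback. */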
4752 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4753 rn_val = displaced_read_reg (regs, dsc, rn);
4754 /* PC should be 4-byte aligned. */
4755 rn_val = rn_val & 0xfffffffc;
4756 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4758 dsc->u.ldst.writeback = writeback;
4759 dsc->u.ldst.rn = rn;
4761 dsc->cleanup = &cleanup_copro_load_store;
4765 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4766 struct regcache *regs,
4767 struct displaced_step_closure *dsc)
4769 unsigned int rn = bits (insn, 16, 19);
4771 if (!insn_references_pc (insn, 0x000f0000ul))
4772 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4774 if (debug_displaced)
4775 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4776 "load/store insn %.8lx\n", (unsigned long) insn);
4778 dsc->modinsn[0] = insn & 0xfff0ffff;
4780 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4786 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4787 uint16_t insn2, struct regcache *regs,
4788 struct displaced_step_closure *dsc)
4790 unsigned int rn = bits (insn1, 0, 3);
4792 if (rn != ARM_PC_REGNUM)
4793 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4794 "copro load/store", dsc);
4796 if (debug_displaced)
4797 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4798 "load/store insn %.4x%.4x\n", insn1, insn2);
4800 dsc->modinsn[0] = insn1 & 0xfff0;
4801 dsc->modinsn[1] = insn2;
4804 /* This function is called for copying the LDC/LDC2/VLDR instructions, whose
4805 literal (PC-relative) forms do not support writeback, so pass 0. */
4806 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4811 /* Clean up branch instructions (actually perform the branch, by setting
4815 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4816 struct displaced_step_closure *dsc)
4818 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4819 int branch_taken = condition_true (dsc->u.branch.cond, status);
4820 enum pc_write_style write_pc = dsc->u.branch.exchange
4821 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4826 if (dsc->u.branch.link)
4828 /* The value of LR should be the address of the insn after the current
4829 one. In order not to confuse logic handling a later `bx lr' insn, if the
4830 current insn is Thumb, bit 0 of the LR value should be set to 1. */
4831 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4834 next_insn_addr |= 0x1;
4836 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4840 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4843 /* Copy B/BL/BLX instructions with immediate destinations. */
4846 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4847 struct displaced_step_closure *dsc,
4848 unsigned int cond, int exchange, int link, long offset)
4850 /* Implement "BL<cond> <label>" as:
4852 Preparation: cond <- instruction condition
4853 Insn: mov r0, r0 (nop)
4854 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4856 B<cond> similar, but don't set r14 in cleanup. */
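/* For example (illustrative), "bl 0x8100" at 0x8000 in ARM state is copied
   as a NOP; cleanup_branch then sets r14 to 0x8004 and computes the
   destination as 0x8000 + 8 + offset = 0x8100 before writing it to the
   PC. */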
4858 dsc->u.branch.cond = cond;
4859 dsc->u.branch.link = link;
4860 dsc->u.branch.exchange = exchange;
4862 dsc->u.branch.dest = dsc->insn_addr;
4863 if (link && exchange)
4864 /* For BLX, offset is computed from the Align (PC, 4). */
4865 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4868 dsc->u.branch.dest += 4 + offset;
4870 dsc->u.branch.dest += 8 + offset;
4872 dsc->cleanup = &cleanup_branch;
4875 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4876 struct regcache *regs, struct displaced_step_closure *dsc)
4878 unsigned int cond = bits (insn, 28, 31);
4879 int exchange = (cond == 0xf);
4880 int link = exchange || bit (insn, 24);
4883 if (debug_displaced)
4884 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4885 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4886 (unsigned long) insn);
4888 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4889 then arrange the switch into Thumb mode. */
4890 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4892 offset = bits (insn, 0, 23) << 2;
4894 if (bit (offset, 25))
4895 offset = offset | ~0x3ffffff;
4897 dsc->modinsn[0] = ARM_NOP;
4899 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4904 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4905 uint16_t insn2, struct regcache *regs,
4906 struct displaced_step_closure *dsc)
4908 int link = bit (insn2, 14);
4909 int exchange = link && !bit (insn2, 12);
4912 int j1 = bit (insn2, 13);
4913 int j2 = bit (insn2, 11);
4914 int s = sbits (insn1, 10, 10);
4915 int i1 = !(j1 ^ bit (insn1, 10));
4916 int i2 = !(j2 ^ bit (insn1, 10));
4918 if (!link && !exchange) /* B */
4920 offset = (bits (insn2, 0, 10) << 1);
4921 if (bit (insn2, 12)) /* Encoding T4 */
4923 offset |= (bits (insn1, 0, 9) << 12)
4929 else /* Encoding T3 */
4931 offset |= (bits (insn1, 0, 5) << 12)
4935 cond = bits (insn1, 6, 9);
4940 offset = (bits (insn1, 0, 9) << 12);
4941 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4942 offset |= exchange ?
4943 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4946 if (debug_displaced)
4947 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4948 "%.4x %.4x with offset %.8lx\n",
4949 link ? (exchange) ? "blx" : "bl" : "b",
4950 insn1, insn2, offset);
4952 dsc->modinsn[0] = THUMB_NOP;
4954 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4958 /* Copy B Thumb instructions. */
4960 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4961 struct displaced_step_closure *dsc)
4963 unsigned int cond = 0;
4965 unsigned short bit_12_15 = bits (insn, 12, 15);
4966 CORE_ADDR from = dsc->insn_addr;
4968 if (bit_12_15 == 0xd)
4970 /* offset = SignExtend (imm8:0, 32) */
4971 offset = sbits ((insn << 1), 0, 8);
4972 cond = bits (insn, 8, 11);
4974 else if (bit_12_15 == 0xe) /* Encoding T2 */
4976 offset = sbits ((insn << 1), 0, 11);
4980 if (debug_displaced)
4981 fprintf_unfiltered (gdb_stdlog,
4982 "displaced: copying b immediate insn %.4x "
4983 "with offset %d\n", insn, offset);
4985 dsc->u.branch.cond = cond;
4986 dsc->u.branch.link = 0;
4987 dsc->u.branch.exchange = 0;
4988 dsc->u.branch.dest = from + 4 + offset;
4990 dsc->modinsn[0] = THUMB_NOP;
4992 dsc->cleanup = &cleanup_branch;
4997 /* Copy BX/BLX with register-specified destinations. */
5000 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5001 struct displaced_step_closure *dsc, int link,
5002 unsigned int cond, unsigned int rm)
5004 /* Implement {BX,BLX}<cond> <reg>" as:
5006 Preparation: cond <- instruction condition
5007 Insn: mov r0, r0 (nop)
5008 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5010 Don't set r14 in cleanup for BX. */
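/* For example (illustrative), "blx r3" in ARM state with r3 = 0x8001 is
   copied as a NOP; the cleanup sets r14 to the address of the following
   instruction and, via BX_WRITE_PC, switches to Thumb state and sets the
   PC to 0x8000. */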
5012 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5014 dsc->u.branch.cond = cond;
5015 dsc->u.branch.link = link;
5017 dsc->u.branch.exchange = 1;
5019 dsc->cleanup = &cleanup_branch;
5023 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5024 struct regcache *regs, struct displaced_step_closure *dsc)
5026 unsigned int cond = bits (insn, 28, 31);
5029 int link = bit (insn, 5);
5030 unsigned int rm = bits (insn, 0, 3);
5032 if (debug_displaced)
5033 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5034 (unsigned long) insn);
5036 dsc->modinsn[0] = ARM_NOP;
5038 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5043 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5044 struct regcache *regs,
5045 struct displaced_step_closure *dsc)
5047 int link = bit (insn, 7);
5048 unsigned int rm = bits (insn, 3, 6);
5050 if (debug_displaced)
5051 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5052 (unsigned short) insn);
5054 dsc->modinsn[0] = THUMB_NOP;
5056 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5062 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5065 cleanup_alu_imm (struct gdbarch *gdbarch,
5066 struct regcache *regs, struct displaced_step_closure *dsc)
5068 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5069 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5070 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5071 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5075 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5076 struct displaced_step_closure *dsc)
5078 unsigned int rn = bits (insn, 16, 19);
5079 unsigned int rd = bits (insn, 12, 15);
5080 unsigned int op = bits (insn, 21, 24);
5081 int is_mov = (op == 0xd);
5082 ULONGEST rd_val, rn_val;
5084 if (!insn_references_pc (insn, 0x000ff000ul))
5085 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5087 if (debug_displaced)
5088 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5089 "%.8lx\n", is_mov ? "move" : "ALU",
5090 (unsigned long) insn);
5092 /* Instruction is of form:
5094 <op><cond> rd, [rn,] #imm
5098 Preparation: tmp1, tmp2 <- r0, r1;
5100 Insn: <op><cond> r0, r1, #imm
5101 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5104 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5105 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5106 rn_val = displaced_read_reg (regs, dsc, rn);
5107 rd_val = displaced_read_reg (regs, dsc, rd);
5108 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5109 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5113 dsc->modinsn[0] = insn & 0xfff00fff;
5115 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5117 dsc->cleanup = &cleanup_alu_imm;
5123 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5124 uint16_t insn2, struct regcache *regs,
5125 struct displaced_step_closure *dsc)
5127 unsigned int op = bits (insn1, 5, 8);
5128 unsigned int rn, rm, rd;
5129 ULONGEST rd_val, rn_val;
5131 rn = bits (insn1, 0, 3); /* Rn */
5132 rm = bits (insn2, 0, 3); /* Rm */
5133 rd = bits (insn2, 8, 11); /* Rd */
5135 /* This routine is only called for instruction MOV. */
5136 gdb_assert (op == 0x2 && rn == 0xf);
5138 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5139 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5141 if (debug_displaced)
5142 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5143 "ALU", insn1, insn2);
5145 /* Instruction is of form:
5147 <op><cond> rd, [rn,] #imm
5151 Preparation: tmp1, tmp2 <- r0, r1;
5153 Insn: <op><cond> r0, r1, #imm
5154 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5157 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5158 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5159 rn_val = displaced_read_reg (regs, dsc, rn);
5160 rd_val = displaced_read_reg (regs, dsc, rd);
5161 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5162 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5165 dsc->modinsn[0] = insn1;
5166 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5169 dsc->cleanup = &cleanup_alu_imm;
5174 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5177 cleanup_alu_reg (struct gdbarch *gdbarch,
5178 struct regcache *regs, struct displaced_step_closure *dsc)
5183 rd_val = displaced_read_reg (regs, dsc, 0);
5185 for (i = 0; i < 3; i++)
5186 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5188 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5192 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5193 struct displaced_step_closure *dsc,
5194 unsigned int rd, unsigned int rn, unsigned int rm)
5196 ULONGEST rd_val, rn_val, rm_val;
5198 /* Instruction is of form:
5200 <op><cond> rd, [rn,] rm [, <shift>]
5204 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5205 r0, r1, r2 <- rd, rn, rm
5206 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5207 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5210 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5211 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5212 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5213 rd_val = displaced_read_reg (regs, dsc, rd);
5214 rn_val = displaced_read_reg (regs, dsc, rn);
5215 rm_val = displaced_read_reg (regs, dsc, rm);
5216 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5217 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5218 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5221 dsc->cleanup = &cleanup_alu_reg;
5225 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5226 struct displaced_step_closure *dsc)
5228 unsigned int op = bits (insn, 21, 24);
5229 int is_mov = (op == 0xd);
5231 if (!insn_references_pc (insn, 0x000ff00ful))
5232 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5234 if (debug_displaced)
5235 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5236 is_mov ? "move" : "ALU", (unsigned long) insn);
5239 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5241 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5243 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5249 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5250 struct regcache *regs,
5251 struct displaced_step_closure *dsc)
5255 rm = bits (insn, 3, 6);
5256 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5258 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5259 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5261 if (debug_displaced)
5262 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5263 (unsigned short) insn);
5265 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5267 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5272 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5275 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5276 struct regcache *regs,
5277 struct displaced_step_closure *dsc)
5279 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5282 for (i = 0; i < 4; i++)
5283 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5285 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5289 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5290 struct displaced_step_closure *dsc,
5291 unsigned int rd, unsigned int rn, unsigned int rm,
5295 ULONGEST rd_val, rn_val, rm_val, rs_val;
5297 /* Instruction is of form:
5299 <op><cond> rd, [rn,] rm, <shift> rs
5303 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5304 r0, r1, r2, r3 <- rd, rn, rm, rs
5305 Insn: <op><cond> r0, r1, r2, <shift> r3
5307 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5311 for (i = 0; i < 4; i++)
5312 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5314 rd_val = displaced_read_reg (regs, dsc, rd);
5315 rn_val = displaced_read_reg (regs, dsc, rn);
5316 rm_val = displaced_read_reg (regs, dsc, rm);
5317 rs_val = displaced_read_reg (regs, dsc, rs);
5318 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5319 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5320 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5321 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5323 dsc->cleanup = &cleanup_alu_shifted_reg;
5327 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5328 struct regcache *regs,
5329 struct displaced_step_closure *dsc)
5331 unsigned int op = bits (insn, 21, 24);
5332 int is_mov = (op == 0xd);
5333 unsigned int rd, rn, rm, rs;
5335 if (!insn_references_pc (insn, 0x000fff0ful))
5336 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5338 if (debug_displaced)
5339 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5340 "%.8lx\n", is_mov ? "move" : "ALU",
5341 (unsigned long) insn);
5343 rn = bits (insn, 16, 19);
5344 rm = bits (insn, 0, 3);
5345 rs = bits (insn, 8, 11);
5346 rd = bits (insn, 12, 15);
5349 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5351 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5353 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5358 /* Clean up load instructions. */
5361 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5362 struct displaced_step_closure *dsc)
5364 ULONGEST rt_val, rt_val2 = 0, rn_val;
5366 rt_val = displaced_read_reg (regs, dsc, 0);
5367 if (dsc->u.ldst.xfersize == 8)
5368 rt_val2 = displaced_read_reg (regs, dsc, 1);
5369 rn_val = displaced_read_reg (regs, dsc, 2);
5371 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5372 if (dsc->u.ldst.xfersize > 4)
5373 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5374 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5375 if (!dsc->u.ldst.immed)
5376 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5378 /* Handle register writeback. */
5379 if (dsc->u.ldst.writeback)
5380 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5381 /* Put result in right place. */
5382 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5383 if (dsc->u.ldst.xfersize == 8)
5384 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5387 /* Clean up store instructions. */
5390 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5391 struct displaced_step_closure *dsc)
5393 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5395 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5396 if (dsc->u.ldst.xfersize > 4)
5397 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5398 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5399 if (!dsc->u.ldst.immed)
5400 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5401 if (!dsc->u.ldst.restore_r4)
5402 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5405 if (dsc->u.ldst.writeback)
5406 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5409 /* Copy "extra" load/store instructions. These are halfword/doubleword
5410 transfers, which have a different encoding to byte/word transfers. */
5413 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5414 struct regcache *regs, struct displaced_step_closure *dsc)
5416 unsigned int op1 = bits (insn, 20, 24);
5417 unsigned int op2 = bits (insn, 5, 6);
5418 unsigned int rt = bits (insn, 12, 15);
5419 unsigned int rn = bits (insn, 16, 19);
5420 unsigned int rm = bits (insn, 0, 3);
5421 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5422 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5423 int immed = (op1 & 0x4) != 0;
5425 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5427 if (!insn_references_pc (insn, 0x000ff00ful))
5428 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5430 if (debug_displaced)
5431 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5432 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5433 (unsigned long) insn);
5435 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5438 internal_error (__FILE__, __LINE__,
5439 _("copy_extra_ld_st: instruction decode error"));
5441 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5442 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5443 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5445 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5447 rt_val = displaced_read_reg (regs, dsc, rt);
5448 if (bytesize[opcode] == 8)
5449 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5450 rn_val = displaced_read_reg (regs, dsc, rn);
5452 rm_val = displaced_read_reg (regs, dsc, rm);
5454 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5455 if (bytesize[opcode] == 8)
5456 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5457 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5459 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5462 dsc->u.ldst.xfersize = bytesize[opcode];
5463 dsc->u.ldst.rn = rn;
5464 dsc->u.ldst.immed = immed;
5465 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5466 dsc->u.ldst.restore_r4 = 0;
5469 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5471 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5472 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5474 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5476 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5477 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5479 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5484 /* Copy byte/half word/word loads and stores. */
5487 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5488 struct displaced_step_closure *dsc, int load,
5489 int immed, int writeback, int size, int usermode,
5490 int rt, int rm, int rn)
5492 ULONGEST rt_val, rn_val, rm_val = 0;
5494 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5495 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5497 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5499 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5501 rt_val = displaced_read_reg (regs, dsc, rt);
5502 rn_val = displaced_read_reg (regs, dsc, rn);
5504 rm_val = displaced_read_reg (regs, dsc, rm);
5506 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5507 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5509 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5511 dsc->u.ldst.xfersize = size;
5512 dsc->u.ldst.rn = rn;
5513 dsc->u.ldst.immed = immed;
5514 dsc->u.ldst.writeback = writeback;
5516 /* To write PC we can do:
5518 Before this sequence of instructions:
5519 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5520 r2 is the Rn value obtained from displaced_read_reg.
5522 Insn1: push {pc} Write address of STR instruction + offset on stack
5523 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5524 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5525 = addr(Insn1) + offset - addr(Insn3) - 8
5527 Insn4: add r4, r4, #8 r4 = offset - 8
5528 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5530 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5532 Otherwise we don't know what value to write for PC, since the offset is
5533 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5534 of this can be found in Section "Saving from r15" in
5535 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
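/* The net effect of the sequence above is that r0 holds from + offset,
   where offset is whatever this particular core stores for the PC (8 or
   12), so the out-of-line store writes exactly the value the original
   "str pc" would have written. */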
5537 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5542 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5543 uint16_t insn2, struct regcache *regs,
5544 struct displaced_step_closure *dsc, int size)
5546 unsigned int u_bit = bit (insn1, 7);
5547 unsigned int rt = bits (insn2, 12, 15);
5548 int imm12 = bits (insn2, 0, 11);
5551 if (debug_displaced)
5552 fprintf_unfiltered (gdb_stdlog,
5553 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5554 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5560 /* Rewrite instruction LDR Rt imm12 into:
5562 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5566 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5569 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5570 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5571 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5573 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5575 pc_val = pc_val & 0xfffffffc;
5577 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5578 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5582 dsc->u.ldst.xfersize = size;
5583 dsc->u.ldst.immed = 0;
5584 dsc->u.ldst.writeback = 0;
5585 dsc->u.ldst.restore_r4 = 0;
5587 /* LDR R0, [R2, R3] */
5588 dsc->modinsn[0] = 0xf852;
5589 dsc->modinsn[1] = 0x3;
5592 dsc->cleanup = &cleanup_load;
5598 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5599 uint16_t insn2, struct regcache *regs,
5600 struct displaced_step_closure *dsc,
5601 int writeback, int immed)
5603 unsigned int rt = bits (insn2, 12, 15);
5604 unsigned int rn = bits (insn1, 0, 3);
5605 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5606 /* In LDR (register), there is also a register Rm, which is not allowed to
5607 be PC, so we don't have to check it. */
5609 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5610 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5613 if (debug_displaced)
5614 fprintf_unfiltered (gdb_stdlog,
5615 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5616 rt, rn, insn1, insn2);
5618 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5621 dsc->u.ldst.restore_r4 = 0;
5624 /* ldr[b]<cond> rt, [rn, #imm], etc.
5626 ldr[b]<cond> r0, [r2, #imm]. */
5628 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5629 dsc->modinsn[1] = insn2 & 0x0fff;
5632 /* ldr[b]<cond> rt, [rn, rm], etc.
5634 ldr[b]<cond> r0, [r2, r3]. */
5636 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5637 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5647 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5648 struct regcache *regs,
5649 struct displaced_step_closure *dsc,
5650 int load, int size, int usermode)
5652 int immed = !bit (insn, 25);
5653 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5654 unsigned int rt = bits (insn, 12, 15);
5655 unsigned int rn = bits (insn, 16, 19);
5656 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5658 if (!insn_references_pc (insn, 0x000ff00ful))
5659 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5661 if (debug_displaced)
5662 fprintf_unfiltered (gdb_stdlog,
5663 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5664 load ? (size == 1 ? "ldrb" : "ldr")
5665 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5667 (unsigned long) insn);
5669 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5670 usermode, rt, rm, rn);
5672 if (load || rt != ARM_PC_REGNUM)
5674 dsc->u.ldst.restore_r4 = 0;
5677 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5679 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5680 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5682 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5684 {ldr,str}[b]<cond> r0, [r2, r3]. */
5685 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5689 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5690 dsc->u.ldst.restore_r4 = 1;
5691 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5692 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5693 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5694 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5695 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5699 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5701 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5706 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5711 /* Cleanup LDM instructions with fully-populated register list. This is an
5712 unfortunate corner case: it's impossible to implement correctly by modifying
5713 the instruction. The issue is as follows: we have an instruction,
5717 which we must rewrite to avoid loading PC. A possible solution would be to
5718 do the load in two halves, something like (with suitable cleanup
5722 ldm[id][ab] r8!, {r0-r7}
5724 ldm[id][ab] r8, {r7-r14}
5727 but at present there's no suitable place for <temp>, since the scratch space
5728 is overwritten before the cleanup routine is called. For now, we simply
5729 emulate the instruction. */
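/* For example (illustrative), "ldmia r0, {r0-r15}" is copied as a NOP and
   this cleanup then performs the sixteen loads itself, reading successive
   words from the captured transfer address and writing each one to its
   register, the PC last for this incrementing form. */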
5732 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5733 struct displaced_step_closure *dsc)
5735 int inc = dsc->u.block.increment;
5736 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5737 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5738 uint32_t regmask = dsc->u.block.regmask;
5739 int regno = inc ? 0 : 15;
5740 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5741 int exception_return = dsc->u.block.load && dsc->u.block.user
5742 && (regmask & 0x8000) != 0;
5743 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5744 int do_transfer = condition_true (dsc->u.block.cond, status);
5745 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5750 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5751 sensible we can do here. Complain loudly. */
5752 if (exception_return)
5753 error (_("Cannot single-step exception return"));
5755 /* We don't handle any stores here for now. */
5756 gdb_assert (dsc->u.block.load != 0);
5758 if (debug_displaced)
5759 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5760 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5761 dsc->u.block.increment ? "inc" : "dec",
5762 dsc->u.block.before ? "before" : "after");
5769 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5772 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5775 xfer_addr += bump_before;
5777 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5778 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5780 xfer_addr += bump_after;
5782 regmask &= ~(1 << regno);
5785 if (dsc->u.block.writeback)
5786 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5790 /* Clean up an STM which included the PC in the register list. */
5793 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5794 struct displaced_step_closure *dsc)
5796 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5797 int store_executed = condition_true (dsc->u.block.cond, status);
5798 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5799 CORE_ADDR stm_insn_addr;
5802 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5804 /* If condition code fails, there's nothing else to do. */
5805 if (!store_executed)
5808 if (dsc->u.block.increment)
5810 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5812 if (dsc->u.block.before)
5817 pc_stored_at = dsc->u.block.xfer_addr;
5819 if (dsc->u.block.before)
5823 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5824 stm_insn_addr = dsc->scratch_base;
5825 offset = pc_val - stm_insn_addr;
5827 if (debug_displaced)
5828 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5829 "STM instruction\n", offset);
5831 /* Rewrite the stored PC to the proper value for the non-displaced original
5833 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5834 dsc->insn_addr + offset);
5837 /* Clean up an LDM which includes the PC in the register list. We clumped all
5838 the registers in the transferred list into a contiguous range r0...rX (to
5839 avoid loading PC directly and losing control of the debugged program), so we
5840 must undo that here. */
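/* For example (illustrative), "ldm r8, {r4, r7, pc}" is rewritten to load
   {r0, r1, r2} instead; this cleanup then moves r2 into the PC, r1 into r7
   and r0 into r4, and finally restores the saved values of r0-r2. */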
5843 cleanup_block_load_pc (struct gdbarch *gdbarch,
5844 struct regcache *regs,
5845 struct displaced_step_closure *dsc)
5847 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5848 int load_executed = condition_true (dsc->u.block.cond, status);
5849 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5850 unsigned int regs_loaded = bitcount (mask);
5851 unsigned int num_to_shuffle = regs_loaded, clobbered;
5853 /* The method employed here will fail if the register list is fully populated
5854 (we need to avoid loading PC directly). */
5855 gdb_assert (num_to_shuffle < 16);
5860 clobbered = (1 << num_to_shuffle) - 1;
5862 while (num_to_shuffle > 0)
5864 if ((mask & (1 << write_reg)) != 0)
5866 unsigned int read_reg = num_to_shuffle - 1;
5868 if (read_reg != write_reg)
5870 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5871 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5872 if (debug_displaced)
5873 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5874 "loaded register r%d to r%d\n"), read_reg,
5877 else if (debug_displaced)
5878 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5879 "r%d already in the right place\n"),
5882 clobbered &= ~(1 << write_reg);
5890 /* Restore any registers we scribbled over. */
5891 for (write_reg = 0; clobbered != 0; write_reg++)
5893 if ((clobbered & (1 << write_reg)) != 0)
5895 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5897 if (debug_displaced)
5898 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5899 "clobbered register r%d\n"), write_reg);
5900 clobbered &= ~(1 << write_reg);
5904 /* Perform register writeback manually. */
5905 if (dsc->u.block.writeback)
5907 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5909 if (dsc->u.block.increment)
5910 new_rn_val += regs_loaded * 4;
5912 new_rn_val -= regs_loaded * 4;
5914 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5919 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5920 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5923 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5924 struct regcache *regs,
5925 struct displaced_step_closure *dsc)
5927 int load = bit (insn, 20);
5928 int user = bit (insn, 22);
5929 int increment = bit (insn, 23);
5930 int before = bit (insn, 24);
5931 int writeback = bit (insn, 21);
5932 int rn = bits (insn, 16, 19);
5934 /* Block transfers which don't mention PC can be run directly
5936 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5937 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5939 if (rn == ARM_PC_REGNUM)
5941 warning (_("displaced: Unpredictable LDM or STM with "
5942 "base register r15"));
5943 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5946 if (debug_displaced)
5947 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5948 "%.8lx\n", (unsigned long) insn);
5950 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5951 dsc->u.block.rn = rn;
5953 dsc->u.block.load = load;
5954 dsc->u.block.user = user;
5955 dsc->u.block.increment = increment;
5956 dsc->u.block.before = before;
5957 dsc->u.block.writeback = writeback;
5958 dsc->u.block.cond = bits (insn, 28, 31);
5960 dsc->u.block.regmask = insn & 0xffff;
5964 if ((insn & 0xffff) == 0xffff)
5966 /* LDM with a fully-populated register list. This case is
5967 particularly tricky. Implement for now by fully emulating the
5968 instruction (which might not behave perfectly in all cases, but
5969 these instructions should be rare enough for that not to matter
5971 dsc->modinsn[0] = ARM_NOP;
5973 dsc->cleanup = &cleanup_block_load_all;
5977 /* LDM of a list of registers which includes PC. Implement by
5978 rewriting the list of registers to be transferred into a
5979 contiguous chunk r0...rX before doing the transfer, then shuffling
5980 registers into the correct places in the cleanup routine. */
5981 unsigned int regmask = insn & 0xffff;
5982 unsigned int num_in_list = bitcount (regmask), new_regmask;
5985 for (i = 0; i < num_in_list; i++)
5986 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5988 /* Writeback makes things complicated. We need to avoid clobbering
5989 the base register with one of the registers in our modified
5990 register list, but just using a different register can't work in
5993 ldm r14!, {r0-r13,pc}
5995 which would need to be rewritten as:
5999 but that can't work, because there's no free register for N.
6001 Solve this by turning off the writeback bit, and emulating
6002 writeback manually in the cleanup routine. */
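/* For example (illustrative), "ldm r14!, {r0-r13, pc}" is copied with the
   writeback bit cleared and the register list replaced by {r0-r14};
   cleanup_block_load_pc then moves the fifteenth loaded word into the PC
   and writes r14 = base + 60 by hand. */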
6007 new_regmask = (1 << num_in_list) - 1;
6009 if (debug_displaced)
6010 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6011 "{..., pc}: original reg list %.4x, modified "
6012 "list %.4x\n"), rn, writeback ? "!" : "",
6013 (int) insn & 0xffff, new_regmask);
6015 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6017 dsc->cleanup = &cleanup_block_load_pc;
6022 /* STM of a list of registers which includes PC. Run the instruction
6023 as-is, but out of line: this will store the wrong value for the PC,
6024 so we must manually fix up the memory in the cleanup routine.
6025 Doing things this way has the advantage that we can auto-detect
6026 the offset of the PC write (which is architecture-dependent) in
6027 the cleanup routine. */
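/* For instance (illustrative), for "stmdb sp!, {fp, lr, pc}" the word
   stored for the PC is the scratch address plus the core-specific offset;
   cleanup_block_store_pc reads it back, recovers the offset by subtracting
   dsc->scratch_base, and rewrites that slot with dsc->insn_addr plus the
   same offset. */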
6028 dsc->modinsn[0] = insn;
6030 dsc->cleanup = &cleanup_block_store_pc;
6037 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6038 struct regcache *regs,
6039 struct displaced_step_closure *dsc)
6041 int rn = bits (insn1, 0, 3);
6042 int load = bit (insn1, 4);
6043 int writeback = bit (insn1, 5);
6045 /* Block transfers which don't mention PC can be run directly
6047 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6048 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6050 if (rn == ARM_PC_REGNUM)
6052 warning (_("displaced: Unpredictable LDM or STM with "
6053 "base register r15"));
6054 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6055 "unpredictable ldm/stm", dsc);
6058 if (debug_displaced)
6059 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6060 "%.4x%.4x\n", insn1, insn2);
6062 /* Clear bit 13, since it should always be zero. */
6063 dsc->u.block.regmask = (insn2 & 0xdfff);
6064 dsc->u.block.rn = rn;
6066 dsc->u.block.load = load;
6067 dsc->u.block.user = 0;
6068 dsc->u.block.increment = bit (insn1, 7);
6069 dsc->u.block.before = bit (insn1, 8);
6070 dsc->u.block.writeback = writeback;
6071 dsc->u.block.cond = INST_AL;
6072 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6076 if (dsc->u.block.regmask == 0xffff)
6078 /* This case cannot happen, since bit 13 was cleared above. */
6083 unsigned int regmask = dsc->u.block.regmask;
6084 unsigned int num_in_list = bitcount (regmask), new_regmask;
6087 for (i = 0; i < num_in_list; i++)
6088 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6093 new_regmask = (1 << num_in_list) - 1;
6095 if (debug_displaced)
6096 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6097 "{..., pc}: original reg list %.4x, modified "
6098 "list %.4x\n"), rn, writeback ? "!" : "",
6099 (int) dsc->u.block.regmask, new_regmask);
6101 dsc->modinsn[0] = insn1;
6102 dsc->modinsn[1] = (new_regmask & 0xffff);
6105 dsc->cleanup = &cleanup_block_load_pc;
6110 dsc->modinsn[0] = insn1;
6111 dsc->modinsn[1] = insn2;
6113 dsc->cleanup = &cleanup_block_store_pc;
6118 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6119 This is used to avoid a dependency on BFD's bfd_endian enum. */
6122 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6125 return read_memory_unsigned_integer (memaddr, len,
6126 (enum bfd_endian) byte_order);
6129 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6132 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6135 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6138 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6141 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6146 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6149 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6151 return arm_is_thumb (self->regcache);
6154 /* single_step() is called just before we want to resume the inferior,
6155 if we want to single-step it but there is no hardware or kernel
6156 single-step support. We find the targets of the coming instructions
6157 and breakpoint them. */
6160 arm_software_single_step (struct frame_info *frame)
6162 struct regcache *regcache = get_current_regcache ();
6163 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6164 struct address_space *aspace = get_regcache_aspace (regcache);
6165 struct arm_get_next_pcs next_pcs_ctx;
6168 VEC (CORE_ADDR) *next_pcs = NULL;
6169 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6171 arm_get_next_pcs_ctor (&next_pcs_ctx,
6172 &arm_get_next_pcs_ops,
6173 gdbarch_byte_order (gdbarch),
6174 gdbarch_byte_order_for_code (gdbarch),
6178 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6180 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6181 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6183 do_cleanups (old_chain);
6188 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6189 for Linux, where some SVC instructions must be treated specially. */
6192 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6193 struct displaced_step_closure *dsc)
6195 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6197 if (debug_displaced)
6198 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6199 "%.8lx\n", (unsigned long) resume_addr);
6201 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6205 /* Common copy routine for the svc instruction. */
6208 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6209 struct displaced_step_closure *dsc)
6211 /* Preparation: none.
6212 Insn: unmodified svc.
6213 Cleanup: pc <- insn_addr + insn_size. */
6215 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6217 dsc->wrote_to_pc = 1;
6219 /* Allow OS-specific code to override SVC handling. */
6220 if (dsc->u.svc.copy_svc_os)
6221 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6224 dsc->cleanup = &cleanup_svc;
6230 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6231 struct regcache *regs, struct displaced_step_closure *dsc)
6234 if (debug_displaced)
6235 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6236 (unsigned long) insn);
6238 dsc->modinsn[0] = insn;
6240 return install_svc (gdbarch, regs, dsc);
6244 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6245 struct regcache *regs, struct displaced_step_closure *dsc)
6248 if (debug_displaced)
6249 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6252 dsc->modinsn[0] = insn;
6254 return install_svc (gdbarch, regs, dsc);
6257 /* Copy undefined instructions. */
6260 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6261 struct displaced_step_closure *dsc)
6263 if (debug_displaced)
6264 fprintf_unfiltered (gdb_stdlog,
6265 "displaced: copying undefined insn %.8lx\n",
6266 (unsigned long) insn);
6268 dsc->modinsn[0] = insn;
6274 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6275 struct displaced_step_closure *dsc)
6278 if (debug_displaced)
6279 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6280 "%.4x %.4x\n", (unsigned short) insn1,
6281 (unsigned short) insn2);
6283 dsc->modinsn[0] = insn1;
6284 dsc->modinsn[1] = insn2;
6290 /* Copy unpredictable instructions. */
6293 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6294 struct displaced_step_closure *dsc)
6296 if (debug_displaced)
6297 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6298 "%.8lx\n", (unsigned long) insn);
6300 dsc->modinsn[0] = insn;
6305 /* The decode_* functions are instruction decoding helpers. They mostly follow
6306 the presentation in the ARM ARM. */
6309 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6310 struct regcache *regs,
6311 struct displaced_step_closure *dsc)
6313 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6314 unsigned int rn = bits (insn, 16, 19);
6316 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6317 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6318 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6319 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6320 else if ((op1 & 0x60) == 0x20)
6321 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6322 else if ((op1 & 0x71) == 0x40)
6323 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6325 else if ((op1 & 0x77) == 0x41)
6326 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6327 else if ((op1 & 0x77) == 0x45)
6328 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6329 else if ((op1 & 0x77) == 0x51)
6332 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6334 return arm_copy_unpred (gdbarch, insn, dsc);
6336 else if ((op1 & 0x77) == 0x55)
6337 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6338 else if (op1 == 0x57)
6341 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6342 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6343 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6344 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6345 default: return arm_copy_unpred (gdbarch, insn, dsc);
6347 else if ((op1 & 0x63) == 0x43)
6348 return arm_copy_unpred (gdbarch, insn, dsc);
6349 else if ((op2 & 0x1) == 0x0)
6350 switch (op1 & ~0x80)
6353 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6355 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6356 case 0x71: case 0x75:
6358 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6359 case 0x63: case 0x67: case 0x73: case 0x77:
6360 return arm_copy_unpred (gdbarch, insn, dsc);
6362 return arm_copy_undef (gdbarch, insn, dsc);
6365 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6369 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6370 struct regcache *regs,
6371 struct displaced_step_closure *dsc)
6373 if (bit (insn, 27) == 0)
6374 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6375 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6376 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6379 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6382 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6384 case 0x4: case 0x5: case 0x6: case 0x7:
6385 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6388 switch ((insn & 0xe00000) >> 21)
6390 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6392 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6395 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6398 return arm_copy_undef (gdbarch, insn, dsc);
6403 int rn_f = (bits (insn, 16, 19) == 0xf);
6404 switch ((insn & 0xe00000) >> 21)
6407 /* ldc/ldc2 imm (undefined for rn == pc). */
6408 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6409 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6412 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6414 case 0x4: case 0x5: case 0x6: case 0x7:
6415 /* ldc/ldc2 lit (undefined for rn != pc). */
6416 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6417 : arm_copy_undef (gdbarch, insn, dsc);
6420 return arm_copy_undef (gdbarch, insn, dsc);
6425 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6428 if (bits (insn, 16, 19) == 0xf)
6430 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6432 return arm_copy_undef (gdbarch, insn, dsc);
6436 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6438 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6442 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6444 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6447 return arm_copy_undef (gdbarch, insn, dsc);
6451 /* Decode miscellaneous instructions in dp/misc encoding space. */
6454 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6455 struct regcache *regs,
6456 struct displaced_step_closure *dsc)
6458 unsigned int op2 = bits (insn, 4, 6);
6459 unsigned int op = bits (insn, 21, 22);
6464 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6467 if (op == 0x1) /* bx. */
6468 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6470 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6472 return arm_copy_undef (gdbarch, insn, dsc);
6476 /* Not really supported. */
6477 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6479 return arm_copy_undef (gdbarch, insn, dsc);
6483 return arm_copy_bx_blx_reg (gdbarch, insn,
6484 regs, dsc); /* blx register. */
6486 return arm_copy_undef (gdbarch, insn, dsc);
6489 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6493 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6495 /* Not really supported. */
6496 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6499 return arm_copy_undef (gdbarch, insn, dsc);
6504 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6505 struct regcache *regs,
6506 struct displaced_step_closure *dsc)
6509 switch (bits (insn, 20, 24))
6512 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6515 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6517 case 0x12: case 0x16:
6518 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6521 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6525 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6527 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6528 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6529 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6530 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6531 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6532 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6533 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6534 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6535 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6536 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6537 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6538 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6539 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6540 /* 2nd arg means "unprivileged". */
6541 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6545 /* Should be unreachable. */
6550 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6551 struct regcache *regs,
6552 struct displaced_step_closure *dsc)
6554 int a = bit (insn, 25), b = bit (insn, 4);
6555 uint32_t op1 = bits (insn, 20, 24);
6557 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6558 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6559 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6560 else if ((!a && (op1 & 0x17) == 0x02)
6561 || (a && (op1 & 0x17) == 0x02 && !b))
6562 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6563 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6564 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6565 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6566 else if ((!a && (op1 & 0x17) == 0x03)
6567 || (a && (op1 & 0x17) == 0x03 && !b))
6568 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6569 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6570 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6571 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6572 else if ((!a && (op1 & 0x17) == 0x06)
6573 || (a && (op1 & 0x17) == 0x06 && !b))
6574 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6575 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6576 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6577 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6578 else if ((!a && (op1 & 0x17) == 0x07)
6579 || (a && (op1 & 0x17) == 0x07 && !b))
6580 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6582 /* Should be unreachable. */
6587 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6588 struct displaced_step_closure *dsc)
6590 switch (bits (insn, 20, 24))
6592 case 0x00: case 0x01: case 0x02: case 0x03:
6593 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6595 case 0x04: case 0x05: case 0x06: case 0x07:
6596 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6598 case 0x08: case 0x09: case 0x0a: case 0x0b:
6599 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6600 return arm_copy_unmodified (gdbarch, insn,
6601 "decode/pack/unpack/saturate/reverse", dsc);
6604 if (bits (insn, 5, 7) == 0) /* op2. */
6606 if (bits (insn, 12, 15) == 0xf)
6607 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6609 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6612 return arm_copy_undef (gdbarch, insn, dsc);
6614 case 0x1a: case 0x1b:
6615 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6616 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6618 return arm_copy_undef (gdbarch, insn, dsc);
6620 case 0x1c: case 0x1d:
6621 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6623 if (bits (insn, 0, 3) == 0xf)
6624 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6626 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6629 return arm_copy_undef (gdbarch, insn, dsc);
6631 case 0x1e: case 0x1f:
6632 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6633 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6635 return arm_copy_undef (gdbarch, insn, dsc);
6638 /* Should be unreachable. */
6643 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6644 struct regcache *regs,
6645 struct displaced_step_closure *dsc)
6648 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6650 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6654 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6655 struct regcache *regs,
6656 struct displaced_step_closure *dsc)
6658 unsigned int opcode = bits (insn, 20, 24);
6662 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6663 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6665 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6666 case 0x12: case 0x16:
6667 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6669 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6670 case 0x13: case 0x17:
6671 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6673 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6674 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6675 /* Note: no writeback for these instructions. Bit 25 will always be
6676 zero though (via caller), so the following works OK. */
6677 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6680 /* Should be unreachable. */
6684 /* Decode shifted register instructions. */
6687 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6688 uint16_t insn2, struct regcache *regs,
6689 struct displaced_step_closure *dsc)
6691 /* PC is only allowed to be used in the MOV instruction. */
6693 unsigned int op = bits (insn1, 5, 8);
6694 unsigned int rn = bits (insn1, 0, 3);
6696 if (op == 0x2 && rn == 0xf) /* MOV */
6697 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6699 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6700 "dp (shift reg)", dsc);
6704 /* Decode extension register load/store. Exactly the same as
6705 arm_decode_ext_reg_ld_st. */
6708 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6709 uint16_t insn2, struct regcache *regs,
6710 struct displaced_step_closure *dsc)
6712 unsigned int opcode = bits (insn1, 4, 8);
6716 case 0x04: case 0x05:
6717 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6718 "vfp/neon vmov", dsc);
6720 case 0x08: case 0x0c: /* 01x00 */
6721 case 0x0a: case 0x0e: /* 01x10 */
6722 case 0x12: case 0x16: /* 10x10 */
6723 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6724 "vfp/neon vstm/vpush", dsc);
6726 case 0x09: case 0x0d: /* 01x01 */
6727 case 0x0b: case 0x0f: /* 01x11 */
6728 case 0x13: case 0x17: /* 10x11 */
6729 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6730 "vfp/neon vldm/vpop", dsc);
6732 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6733 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6735 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6736 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6739 /* Should be unreachable. */
6744 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6745 struct regcache *regs, struct displaced_step_closure *dsc)
6747 unsigned int op1 = bits (insn, 20, 25);
6748 int op = bit (insn, 4);
6749 unsigned int coproc = bits (insn, 8, 11);
6751 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6752 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6753 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6754 && (coproc & 0xe) != 0xa)
6756 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6757 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6758 && (coproc & 0xe) != 0xa)
6759 /* ldc/ldc2 imm/lit. */
6760 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6761 else if ((op1 & 0x3e) == 0x00)
6762 return arm_copy_undef (gdbarch, insn, dsc);
6763 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6764 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6765 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6766 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6767 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6768 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6769 else if ((op1 & 0x30) == 0x20 && !op)
6771 if ((coproc & 0xe) == 0xa)
6772 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6774 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6776 else if ((op1 & 0x30) == 0x20 && op)
6777 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6778 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6779 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6780 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6781 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6782 else if ((op1 & 0x30) == 0x30)
6783 return arm_copy_svc (gdbarch, insn, regs, dsc);
6785 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6789 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6790 uint16_t insn2, struct regcache *regs,
6791 struct displaced_step_closure *dsc)
6793 unsigned int coproc = bits (insn2, 8, 11);
6794 unsigned int bit_5_8 = bits (insn1, 5, 8);
6795 unsigned int bit_9 = bit (insn1, 9);
6796 unsigned int bit_4 = bit (insn1, 4);
6801 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6802 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6804 else if (bit_5_8 == 0) /* UNDEFINED. */
6805 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6808 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6809 if ((coproc & 0xe) == 0xa)
6810 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6812 else /* coproc is not 101x. */
6814 if (bit_4 == 0) /* STC/STC2. */
6815 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6817 else /* LDC/LDC2 {literal, immediate}. */
6818 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6824 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6830 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6831 struct displaced_step_closure *dsc, int rd)
6837 Preparation: Rd <- PC
6843 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6844 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
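/* A minimal sketch of what the PC-relative copy helpers arrange, using
   ADR as the example (register numbers are illustrative):

     original, at address FROM:   adr   rd, <label>
     preparation (above):         rd <- the value the original insn
                                        would have read from the PC
     copied insn, in scratch:     an equivalent add-immediate on rd,
                                        with the PC no longer referenced
     cleanup:                     none needed

   Because the copied instruction does not read the PC, it produces the
   same result no matter where the scratch space is located.  */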
6848 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6849 struct displaced_step_closure *dsc,
6850 int rd, unsigned int imm)
6853 /* Encoding T2: ADDS Rd, #imm */
6854 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6856 install_pc_relative (gdbarch, regs, dsc, rd);
6862 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6863 struct regcache *regs,
6864 struct displaced_step_closure *dsc)
6866 unsigned int rd = bits (insn, 8, 10);
6867 unsigned int imm8 = bits (insn, 0, 7);
6869 if (debug_displaced)
6870 fprintf_unfiltered (gdb_stdlog,
6871 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6874 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6878 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6879 uint16_t insn2, struct regcache *regs,
6880 struct displaced_step_closure *dsc)
6882 unsigned int rd = bits (insn2, 8, 11);
6883 /* Since the immediate has the same encoding in ADR, ADD and SUB, we simply
6884 extract the raw immediate encoding rather than computing the immediate. When
6885 generating the ADD or SUB instruction, we can then simply OR the raw
6886 immediate fields into the new encoding. */
6887 unsigned int imm_3_8 = insn2 & 0x70ff;
6888 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6890 if (debug_displaced)
6891 fprintf_unfiltered (gdb_stdlog,
6892 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6893 rd, imm_i, imm_3_8, insn1, insn2);
6895 if (bit (insn1, 7)) /* Encoding T2 */
6897 /* Encoding T3: SUB Rd, Rd, #imm */
6898 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6899 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6901 else /* Encoding T3 */
6903 /* Encoding T3: ADD Rd, Rd, #imm */
6904 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6905 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6909 install_pc_relative (gdbarch, regs, dsc, rd);
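/* Field extraction used above, spelled out (the masks are taken from the
   code, the rest is illustration): the 32-bit Thumb ADR immediate is
   split as i:imm3:imm8, where

     insn1 & 0x0400  ->  i     (bit 10 of the first halfword)
     insn2 & 0x7000  ->  imm3  (bits 14:12 of the second halfword)
     insn2 & 0x00ff  ->  imm8  (bits 7:0 of the second halfword)

   ADD and SUB (immediate, encoding T3) keep these fields in the same
   positions, so the raw bits can be OR-ed straight into the 0xf100 /
   0xf1a0 templates without ever decoding the immediate's value.  */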
6915 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6916 struct regcache *regs,
6917 struct displaced_step_closure *dsc)
6919 unsigned int rt = bits (insn1, 8, 10);
6921 int imm8 = (bits (insn1, 0, 7) << 2);
6927 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6929 Insn: LDR R0, [R2, R3];
6930 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6932 if (debug_displaced)
6933 fprintf_unfiltered (gdb_stdlog,
6934 "displaced: copying thumb ldr r%d [pc #%d]\n"
6937 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6938 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6939 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6940 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6941 /* The assembler calculates the required value of the offset from the
6942 Align(PC,4) value of this instruction to the label. */
6943 pc = pc & 0xfffffffc;
6945 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6946 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6949 dsc->u.ldst.xfersize = 4;
6951 dsc->u.ldst.immed = 0;
6952 dsc->u.ldst.writeback = 0;
6953 dsc->u.ldst.restore_r4 = 0;
6955 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6957 dsc->cleanup = &cleanup_load;
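/* Worked example of the rewrite above (addresses are illustrative):
   for "ldr r1, [pc, #8]" at 0x8000 the architectural load address is
   Align (0x8000 + 4, 4) + 8 == 0x800c.  The copied sequence sets
   r2 = 0x8004 (the aligned PC value), r3 = 8 and runs
   "ldr r0, [r2, r3]" in the scratch space, which reads the very same
   address.  cleanup_load then moves the loaded word from r0 into r1
   and restores the original r0, r2 and r3 from dsc->tmp[].  */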
6962 /* Copy Thumb cbnz/cbz instruction. */
6965 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6966 struct regcache *regs,
6967 struct displaced_step_closure *dsc)
6969 int non_zero = bit (insn1, 11);
6970 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6971 CORE_ADDR from = dsc->insn_addr;
6972 int rn = bits (insn1, 0, 2);
6973 int rn_val = displaced_read_reg (regs, dsc, rn);
6975 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6976 /* CBNZ and CBZ do not affect the condition flags. If the condition is true,
6977 set it to INST_AL so cleanup_branch will know the branch is taken; otherwise,
6978 if the condition is false, leave it as is and cleanup_branch will do nothing. */
6979 if (dsc->u.branch.cond)
6981 dsc->u.branch.cond = INST_AL;
6982 dsc->u.branch.dest = from + 4 + imm5;
6985 dsc->u.branch.dest = from + 2;
6987 dsc->u.branch.link = 0;
6988 dsc->u.branch.exchange = 0;
6990 if (debug_displaced)
6991 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6992 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6993 rn, rn_val, insn1, dsc->u.branch.dest);
6995 dsc->modinsn[0] = THUMB_NOP;
6997 dsc->cleanup = &cleanup_branch;
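/* Worked example (values are illustrative): for "cbnz r2, <label>" at
   0x8000 with an encoded offset of 0x20, the scratch space only runs a
   NOP.  If r2 is non-zero when the step happens, the destination
   recorded above is 0x8000 + 4 + 0x20 == 0x8024 and cleanup_branch
   writes it into the PC; if r2 is zero, the condition is recorded as
   false and execution resumes at the next instruction, 0x8002.  */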
7001 /* Copy Table Branch Byte/Halfword. */
7003 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7004 uint16_t insn2, struct regcache *regs,
7005 struct displaced_step_closure *dsc)
7007 ULONGEST rn_val, rm_val;
7008 int is_tbh = bit (insn2, 4);
7009 CORE_ADDR halfwords = 0;
7010 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7012 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7013 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7019 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7020 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7026 target_read_memory (rn_val + rm_val, buf, 1);
7027 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7030 if (debug_displaced)
7031 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7032 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7033 (unsigned int) rn_val, (unsigned int) rm_val,
7034 (unsigned int) halfwords);
7036 dsc->u.branch.cond = INST_AL;
7037 dsc->u.branch.link = 0;
7038 dsc->u.branch.exchange = 0;
7039 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7041 dsc->cleanup = &cleanup_branch;
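/* Worked example (values are illustrative): for "tbb [r0, r1]" at
   0x8000 with r0 == 0x9000 and r1 == 3, the byte at 0x9003 is read;
   if that byte is 0x12 the destination recorded above is
   0x8000 + 4 + 2 * 0x12 == 0x8028.  For TBH the table entry is
   instead a halfword read from rn_val + 2 * rm_val.  cleanup_branch
   later installs the destination in the PC.  */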
7047 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7048 struct displaced_step_closure *dsc)
7051 int val = displaced_read_reg (regs, dsc, 7);
7052 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7055 val = displaced_read_reg (regs, dsc, 8);
7056 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7059 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7064 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7065 struct regcache *regs,
7066 struct displaced_step_closure *dsc)
7068 dsc->u.block.regmask = insn1 & 0x00ff;
7070 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7073 (1) register list is full, that is, r0-r7 are used.
7074 Prepare: tmp[0] <- r8
7076 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7077 MOV r8, r7; Move value of r7 to r8;
7078 POP {r7}; Store PC value into r7.
7080 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7082 (2) register list is not full, supposing there are N registers in
7083 register list (except PC, 0 <= N <= 7).
7084 Prepare: for each i, 0 - N, tmp[i] <- ri.
7086 POP {r0, r1, ...., rN};
7088 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7089 from tmp[] properly.
7091 if (debug_displaced)
7092 fprintf_unfiltered (gdb_stdlog,
7093 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7094 dsc->u.block.regmask, insn1);
7096 if (dsc->u.block.regmask == 0xff)
7098 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7100 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7101 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7102 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7105 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7109 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7111 unsigned int new_regmask;
7113 for (i = 0; i < num_in_list + 1; i++)
7114 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7116 new_regmask = (1 << (num_in_list + 1)) - 1;
7118 if (debug_displaced)
7119 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7120 "{..., pc}: original reg list %.4x,"
7121 " modified list %.4x\n"),
7122 (int) dsc->u.block.regmask, new_regmask);
7124 dsc->u.block.regmask |= 0x8000;
7125 dsc->u.block.writeback = 0;
7126 dsc->u.block.cond = INST_AL;
7128 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7130 dsc->cleanup = &cleanup_block_load_pc;
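/* Worked examples for the two cases handled above (register choices
   are illustrative):

   - "pop {r0-r7, pc}" (full list): r8 is saved in tmp[0], the scratch
     space runs POP {r0-r7}; MOV r8, r7; POP {r7}, and
     cleanup_pop_pc_16bit_all writes r7 into the PC (with BX
     semantics), moves r8 back into r7 and restores r8 from tmp[0].

   - "pop {r0, r1, pc}" (partial list, N == 2): r0-r2 are saved in
     tmp[], the copied instruction becomes POP {r0, r1, r2}, and
     cleanup_block_load_pc scatters the three loaded words back to r0,
     r1 and the PC, restoring any scratched registers from tmp[].  */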
7137 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7138 struct regcache *regs,
7139 struct displaced_step_closure *dsc)
7141 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7142 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7145 /* 16-bit thumb instructions. */
7146 switch (op_bit_12_15)
7148 /* Shift (immediate), add, subtract, move and compare. */
7149 case 0: case 1: case 2: case 3:
7150 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7151 "shift/add/sub/mov/cmp",
7155 switch (op_bit_10_11)
7157 case 0: /* Data-processing */
7158 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7162 case 1: /* Special data instructions and branch and exchange. */
7164 unsigned short op = bits (insn1, 7, 9);
7165 if (op == 6 || op == 7) /* BX or BLX */
7166 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7167 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7168 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7170 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7174 default: /* LDR (literal) */
7175 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7178 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7179 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7182 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7183 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7184 else /* Generate SP-relative address */
7185 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7187 case 11: /* Misc 16-bit instructions */
7189 switch (bits (insn1, 8, 11))
7191 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7192 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7194 case 12: case 13: /* POP */
7195 if (bit (insn1, 8)) /* PC is in register list. */
7196 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7198 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7200 case 15: /* If-Then, and hints */
7201 if (bits (insn1, 0, 3))
7202 /* If-Then makes up to four following instructions conditional. The
7203 IT instruction itself is not conditional, so handle it as an
7204 ordinary unmodified instruction. */
7205 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7208 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7211 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7216 if (op_bit_10_11 < 2) /* Store multiple registers */
7217 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7218 else /* Load multiple registers */
7219 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7221 case 13: /* Conditional branch and supervisor call */
7222 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7223 err = thumb_copy_b (gdbarch, insn1, dsc);
7225 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7227 case 14: /* Unconditional branch */
7228 err = thumb_copy_b (gdbarch, insn1, dsc);
7235 internal_error (__FILE__, __LINE__,
7236 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7240 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7241 uint16_t insn1, uint16_t insn2,
7242 struct regcache *regs,
7243 struct displaced_step_closure *dsc)
7245 int rt = bits (insn2, 12, 15);
7246 int rn = bits (insn1, 0, 3);
7247 int op1 = bits (insn1, 7, 8);
7249 switch (bits (insn1, 5, 6))
7251 case 0: /* Load byte and memory hints */
7252 if (rt == 0xf) /* PLD/PLI */
7255 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7256 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7258 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7263 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7264 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7267 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7268 "ldrb{reg, immediate}/ldrbt",
7273 case 1: /* Load halfword and memory hints. */
7274 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7275 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7276 "pld/unalloc memhint", dsc);
7280 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7283 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7287 case 2: /* Load word */
7289 int insn2_bit_8_11 = bits (insn2, 8, 11);
7292 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7293 else if (op1 == 0x1) /* Encoding T3 */
7294 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7296 else /* op1 == 0x0 */
7298 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7299 /* LDR (immediate) */
7300 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7301 dsc, bit (insn2, 8), 1);
7302 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7303 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7306 /* LDR (register) */
7307 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7313 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7320 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7321 uint16_t insn2, struct regcache *regs,
7322 struct displaced_step_closure *dsc)
7325 unsigned short op = bit (insn2, 15);
7326 unsigned int op1 = bits (insn1, 11, 12);
7332 switch (bits (insn1, 9, 10))
7337 /* Load/store {dual, exclusive}, table branch. */
7338 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7339 && bits (insn2, 5, 7) == 0)
7340 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7343 /* PC is not allowed to be used in load/store {dual, exclusive} or table branch. */
7345 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7346 "load/store dual/ex", dsc);
7348 else /* load/store multiple */
7350 switch (bits (insn1, 7, 8))
7352 case 0: case 3: /* SRS, RFE */
7353 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7356 case 1: case 2: /* LDM/STM/PUSH/POP */
7357 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7364 /* Data-processing (shift register). */
7365 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7368 default: /* Coprocessor instructions. */
7369 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7374 case 2: /* op1 = 2 */
7375 if (op) /* Branch and misc control. */
7377 if (bit (insn2, 14) /* BLX/BL */
7378 || bit (insn2, 12) /* Unconditional branch */
7379 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7380 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7382 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7387 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7389 int op = bits (insn1, 4, 8);
7390 int rn = bits (insn1, 0, 3);
7391 if ((op == 0 || op == 0xa) && rn == 0xf)
7392 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7395 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7398 else /* Data processing (modified immediate) */
7399 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7403 case 3: /* op1 = 3 */
7404 switch (bits (insn1, 9, 10))
7408 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7410 else /* NEON Load/Store and Store single data item */
7411 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7412 "neon elt/struct load/store",
7415 case 1: /* op1 = 3, bits (9, 10) == 1 */
7416 switch (bits (insn1, 7, 8))
7418 case 0: case 1: /* Data processing (register) */
7419 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 case 2: /* Multiply and absolute difference */
7423 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7424 "mul/mua/diff", dsc);
7426 case 3: /* Long multiply and divide */
7427 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7432 default: /* Coprocessor instructions */
7433 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7442 internal_error (__FILE__, __LINE__,
7443 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7448 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7449 struct regcache *regs,
7450 struct displaced_step_closure *dsc)
7452 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7454 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7456 if (debug_displaced)
7457 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7458 "at %.8lx\n", insn1, (unsigned long) from);
7461 dsc->insn_size = thumb_insn_size (insn1);
7462 if (thumb_insn_size (insn1) == 4)
7465 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7466 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7469 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7473 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7474 CORE_ADDR to, struct regcache *regs,
7475 struct displaced_step_closure *dsc)
7478 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7481 /* Most displaced instructions use a 1-instruction scratch space, so set this
7482 here and override below if/when necessary. */
7484 dsc->insn_addr = from;
7485 dsc->scratch_base = to;
7486 dsc->cleanup = NULL;
7487 dsc->wrote_to_pc = 0;
7489 if (!displaced_in_arm_mode (regs))
7490 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7494 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7495 if (debug_displaced)
7496 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7497 "at %.8lx\n", (unsigned long) insn,
7498 (unsigned long) from);
7500 if ((insn & 0xf0000000) == 0xf0000000)
7501 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7502 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7504 case 0x0: case 0x1: case 0x2: case 0x3:
7505 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7508 case 0x4: case 0x5: case 0x6:
7509 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7513 err = arm_decode_media (gdbarch, insn, dsc);
7516 case 0x8: case 0x9: case 0xa: case 0xb:
7517 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7520 case 0xc: case 0xd: case 0xe: case 0xf:
7521 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7526 internal_error (__FILE__, __LINE__,
7527 _("arm_process_displaced_insn: Instruction decode error"));
7530 /* Actually set up the scratch space for a displaced instruction. */
7533 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7534 CORE_ADDR to, struct displaced_step_closure *dsc)
7536 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7537 unsigned int i, len, offset;
7538 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7539 int size = dsc->is_thumb? 2 : 4;
7540 const gdb_byte *bkp_insn;
7543 /* Poke modified instruction(s). */
7544 for (i = 0; i < dsc->numinsns; i++)
7546 if (debug_displaced)
7548 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7550 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7553 fprintf_unfiltered (gdb_stdlog, "%.4x",
7554 (unsigned short)dsc->modinsn[i]);
7556 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7557 (unsigned long) to + offset);
7560 write_memory_unsigned_integer (to + offset, size,
7561 byte_order_for_code,
7566 /* Choose the correct breakpoint instruction. */
7569 bkp_insn = tdep->thumb_breakpoint;
7570 len = tdep->thumb_breakpoint_size;
7574 bkp_insn = tdep->arm_breakpoint;
7575 len = tdep->arm_breakpoint_size;
7578 /* Put breakpoint afterwards. */
7579 write_memory (to + offset, bkp_insn, len);
7581 if (debug_displaced)
7582 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7583 paddress (gdbarch, from), paddress (gdbarch, to));
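/* Illustrative scratch-space layout produced above, assuming an
   ARM-mode instruction that was copied as a single modified
   instruction:

     to + 0:  modified copy of the instruction (4 bytes)
     to + 4:  tdep->arm_breakpoint

   For Thumb, each dsc->modinsn[] entry is one 2-byte halfword, so the
   three-halfword POP rewrite earlier occupies to + 0, to + 2 and
   to + 4, followed by tdep->thumb_breakpoint.  Hitting that breakpoint
   returns control to GDB, which then runs the recorded cleanup.  */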
7586 /* Entry point for cleaning things up after a displaced instruction has been
7590 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7591 struct displaced_step_closure *dsc,
7592 CORE_ADDR from, CORE_ADDR to,
7593 struct regcache *regs)
7596 dsc->cleanup (gdbarch, regs, dsc);
7598 if (!dsc->wrote_to_pc)
7599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7600 dsc->insn_addr + dsc->insn_size);
7604 #include "bfd-in2.h"
7605 #include "libcoff.h"
7608 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7610 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7612 if (arm_pc_is_thumb (gdbarch, memaddr))
7614 static asymbol *asym;
7615 static combined_entry_type ce;
7616 static struct coff_symbol_struct csym;
7617 static struct bfd fake_bfd;
7618 static bfd_target fake_target;
7620 if (csym.native == NULL)
7622 /* Create a fake symbol vector containing a Thumb symbol.
7623 This is solely so that the code in print_insn_little_arm()
7624 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7625 the presence of a Thumb symbol and switch to decoding
7626 Thumb instructions. */
7628 fake_target.flavour = bfd_target_coff_flavour;
7629 fake_bfd.xvec = &fake_target;
7630 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7632 csym.symbol.the_bfd = &fake_bfd;
7633 csym.symbol.name = "fake";
7634 asym = (asymbol *) & csym;
7637 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7638 info->symbols = &asym;
7641 info->symbols = NULL;
7643 if (info->endian == BFD_ENDIAN_BIG)
7644 return print_insn_big_arm (memaddr, info);
7646 return print_insn_little_arm (memaddr, info);
7649 /* The following define instruction sequences that will cause ARM
7650 CPUs to take an undefined instruction trap. These are used to
7651 signal a breakpoint to GDB.
7653 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7654 modes. A different instruction is required for each mode. The ARM
7655 CPUs can also be big or little endian. Thus four different
7656 instructions are needed to support all cases.
7658 Note: ARMv4 defines several new instructions that will take the
7659 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7660 not in fact add the new instructions. The new undefined
7661 instructions in ARMv4 are all instructions that had no defined
7662 behaviour in earlier chips. There is no guarantee that they will
7663 raise an exception, but may be treated as NOPs. In practice, it
7664 may only be safe to rely on instructions matching:
7666 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7667 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7668 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7670 Even this may only be true if the condition predicate is true. The
7671 following use a condition predicate of ALWAYS so it is always TRUE.
7673 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7674 and NetBSD all use a software interrupt rather than an undefined
7675 instruction to force a trap. This can be handled by the
7676 abi-specific code during establishment of the gdbarch vector. */
7678 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7679 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7680 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7681 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7683 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7684 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7685 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7686 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
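/* Sanity check against the bit pattern quoted above (purely a
   cross-check, not extra ABI data): read as a little-endian word,
   ARM_LE_BREAKPOINT is 0xe7ffdefe.  Its condition field is 0xe
   (ALWAYS), bits 27:25 are 0b011 and bit 4 is 1, so it sits inside the
   "CCCC 011x ... 1 xxxx" region described as reliably undefined.  The
   big-endian variant is the same word with its bytes reversed, and
   both Thumb breakpoints are the single halfword 0xbebe.  */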
7688 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7689 the program counter value to determine whether a 16-bit or 32-bit
7690 breakpoint should be used. It returns a pointer to a string of
7691 bytes that encode a breakpoint instruction, stores the length of
7692 the string to *lenptr, and adjusts the program counter (if
7693 necessary) to point to the actual memory location where the
7694 breakpoint should be inserted. */
7696 static const unsigned char *
7697 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7699 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7702 if (arm_pc_is_thumb (gdbarch, *pcptr))
7704 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7706 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7707 check whether we are replacing a 32-bit instruction. */
7708 if (tdep->thumb2_breakpoint != NULL)
7711 if (target_read_memory (*pcptr, buf, 2) == 0)
7713 unsigned short inst1;
7714 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7715 if (thumb_insn_size (inst1) == 4)
7717 *lenptr = tdep->thumb2_breakpoint_size;
7718 return tdep->thumb2_breakpoint;
7723 *lenptr = tdep->thumb_breakpoint_size;
7724 return tdep->thumb_breakpoint;
7728 *lenptr = tdep->arm_breakpoint_size;
7729 return tdep->arm_breakpoint;
7734 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7737 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7739 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7740 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7741 that this is not confused with a 32-bit ARM breakpoint. */
7745 /* Extract from an array REGBUF containing the (raw) register state a
7746 function return value of type TYPE, and copy that, in virtual
7747 format, into VALBUF. */
7750 arm_extract_return_value (struct type *type, struct regcache *regs,
7753 struct gdbarch *gdbarch = get_regcache_arch (regs);
7754 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7756 if (TYPE_CODE_FLT == TYPE_CODE (type))
7758 switch (gdbarch_tdep (gdbarch)->fp_model)
7762 /* The value is in register F0 in internal format. We need to
7763 extract the raw value and then convert it to the desired
7765 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7767 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7768 convert_from_extended (floatformat_from_type (type), tmpbuf,
7769 valbuf, gdbarch_byte_order (gdbarch));
7773 case ARM_FLOAT_SOFT_FPA:
7774 case ARM_FLOAT_SOFT_VFP:
7775 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7776 not using the VFP ABI code. */
7778 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7779 if (TYPE_LENGTH (type) > 4)
7780 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7781 valbuf + INT_REGISTER_SIZE);
7785 internal_error (__FILE__, __LINE__,
7786 _("arm_extract_return_value: "
7787 "Floating point model not supported"));
7791 else if (TYPE_CODE (type) == TYPE_CODE_INT
7792 || TYPE_CODE (type) == TYPE_CODE_CHAR
7793 || TYPE_CODE (type) == TYPE_CODE_BOOL
7794 || TYPE_CODE (type) == TYPE_CODE_PTR
7795 || TYPE_CODE (type) == TYPE_CODE_REF
7796 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7798 /* If the type is a plain integer, then the access is
7799 straight-forward. Otherwise we have to play around a bit
7801 int len = TYPE_LENGTH (type);
7802 int regno = ARM_A1_REGNUM;
7807 /* By using store_unsigned_integer we avoid having to do
7808 anything special for small big-endian values. */
7809 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7810 store_unsigned_integer (valbuf,
7811 (len > INT_REGISTER_SIZE
7812 ? INT_REGISTER_SIZE : len),
7814 len -= INT_REGISTER_SIZE;
7815 valbuf += INT_REGISTER_SIZE;
7820 /* For a structure or union the behaviour is as if the value had
7821 been stored to word-aligned memory and then loaded into
7822 registers with 32-bit load instruction(s). */
7823 int len = TYPE_LENGTH (type);
7824 int regno = ARM_A1_REGNUM;
7825 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7829 regcache_cooked_read (regs, regno++, tmpbuf);
7830 memcpy (valbuf, tmpbuf,
7831 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7832 len -= INT_REGISTER_SIZE;
7833 valbuf += INT_REGISTER_SIZE;
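/* Illustrative examples for the two copy loops above: a function
   returning a 64-bit integer has len == 8, so r0 (ARM_A1_REGNUM)
   supplies the first four bytes of VALBUF and r1 the next four.  For a
   6-byte structure, the first iteration copies all four bytes of r0
   and the second copies only the remaining two bytes of r1, exactly as
   if the value had been stored to word-aligned memory and read
   back.  */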
7839 /* Will a function return an aggregate type in memory or in a
7840 register? Return 0 if an aggregate type can be returned in a
7841 register, 1 if it must be returned in memory. */
7844 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7846 enum type_code code;
7848 type = check_typedef (type);
7850 /* Simple, non-aggregate types (i.e. not including vectors and
7851 complex) are always returned in a register (or registers). */
7852 code = TYPE_CODE (type);
7853 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7854 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7857 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7859 /* Vector values should be returned using ARM registers if they
7860 are not over 16 bytes. */
7861 return (TYPE_LENGTH (type) > 16);
7864 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7866 /* The AAPCS says all aggregates not larger than a word are returned
7868 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7877 /* All aggregate types that won't fit in a register must be returned
7879 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7882 /* In the ARM ABI, "integer" like aggregate types are returned in
7883 registers. For an aggregate type to be integer like, its size
7884 must be less than or equal to INT_REGISTER_SIZE and the
7885 offset of each addressable subfield must be zero. Note that bit
7886 fields are not addressable, and all addressable subfields of
7887 unions always start at offset zero.
7889 This function is based on the behaviour of GCC 2.95.1.
7890 See: gcc/arm.c: arm_return_in_memory() for details.
7892 Note: All versions of GCC before GCC 2.95.2 do not set up the
7893 parameters correctly for a function returning the following
7894 structure: struct { float f;}; This should be returned in memory,
7895 not a register. Richard Earnshaw sent me a patch, but I do not
7896 know of any way to detect if a function like the above has been
7897 compiled with the correct calling convention. */
7899 /* Assume all other aggregate types can be returned in a register.
7900 Run a check for structures, unions and arrays. */
7903 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7906 /* Need to check if this struct/union is "integer" like. For
7907 this to be true, its size must be less than or equal to
7908 INT_REGISTER_SIZE and the offset of each addressable
7909 subfield must be zero. Note that bit fields are not
7910 addressable, and unions always start at offset zero. If any
7911 of the subfields is a floating point type, the struct/union
7912 cannot be an integer type. */
7914 /* For each field in the object, check:
7915 1) Is it FP? --> yes, nRc = 1;
7916 2) Is it addressable (bitpos != 0) and
7917 not packed (bitsize == 0)?
7921 for (i = 0; i < TYPE_NFIELDS (type); i++)
7923 enum type_code field_type_code;
7926 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7929 /* Is it a floating point type field? */
7930 if (field_type_code == TYPE_CODE_FLT)
7936 /* If bitpos != 0, then we have to care about it. */
7937 if (TYPE_FIELD_BITPOS (type, i) != 0)
7939 /* Bitfields are not addressable. If the field bitsize is
7940 zero, then the field is not packed. Hence it cannot be
7941 a bitfield or any other packed type. */
7942 if (TYPE_FIELD_BITSIZE (type, i) == 0)
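/* Illustrative application of the rules above: under the AAPCS,
   anything no larger than a word comes back in a register regardless
   of its shape.  Under the APCS, a "union { int i; unsigned char c; }"
   has every addressable subfield at offset zero and no floating point
   members, so it is integer like and is returned in a register; a
   "struct { short lo; short hi; }" fails the offset-zero rule because
   of "hi", and a "struct { float f; }" fails the floating point check,
   so both are returned in memory.  */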
7955 /* Write into appropriate registers a function return value of type
7956 TYPE, given in virtual format. */
7959 arm_store_return_value (struct type *type, struct regcache *regs,
7960 const gdb_byte *valbuf)
7962 struct gdbarch *gdbarch = get_regcache_arch (regs);
7963 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7965 if (TYPE_CODE (type) == TYPE_CODE_FLT)
7967 gdb_byte buf[MAX_REGISTER_SIZE];
7969 switch (gdbarch_tdep (gdbarch)->fp_model)
7973 convert_to_extended (floatformat_from_type (type), buf, valbuf,
7974 gdbarch_byte_order (gdbarch));
7975 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
7978 case ARM_FLOAT_SOFT_FPA:
7979 case ARM_FLOAT_SOFT_VFP:
7980 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7981 not using the VFP ABI code. */
7983 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
7984 if (TYPE_LENGTH (type) > 4)
7985 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7986 valbuf + INT_REGISTER_SIZE);
7990 internal_error (__FILE__, __LINE__,
7991 _("arm_store_return_value: Floating "
7992 "point model not supported"));
7996 else if (TYPE_CODE (type) == TYPE_CODE_INT
7997 || TYPE_CODE (type) == TYPE_CODE_CHAR
7998 || TYPE_CODE (type) == TYPE_CODE_BOOL
7999 || TYPE_CODE (type) == TYPE_CODE_PTR
8000 || TYPE_CODE (type) == TYPE_CODE_REF
8001 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8003 if (TYPE_LENGTH (type) <= 4)
8005 /* Values of one word or less are zero/sign-extended and
8007 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8008 LONGEST val = unpack_long (type, valbuf);
8010 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8011 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8015 /* Integral values greater than one word are stored in consecutive
8016 registers starting with r0. This will always be a multiple of
8017 the register size. */
8018 int len = TYPE_LENGTH (type);
8019 int regno = ARM_A1_REGNUM;
8023 regcache_cooked_write (regs, regno++, valbuf);
8024 len -= INT_REGISTER_SIZE;
8025 valbuf += INT_REGISTER_SIZE;
8031 /* For a structure or union the behaviour is as if the value had
8032 been stored to word-aligned memory and then loaded into
8033 registers with 32-bit load instruction(s). */
8034 int len = TYPE_LENGTH (type);
8035 int regno = ARM_A1_REGNUM;
8036 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8040 memcpy (tmpbuf, valbuf,
8041 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8042 regcache_cooked_write (regs, regno++, tmpbuf);
8043 len -= INT_REGISTER_SIZE;
8044 valbuf += INT_REGISTER_SIZE;
8050 /* Handle function return values. */
8052 static enum return_value_convention
8053 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8054 struct type *valtype, struct regcache *regcache,
8055 gdb_byte *readbuf, const gdb_byte *writebuf)
8057 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8058 struct type *func_type = function ? value_type (function) : NULL;
8059 enum arm_vfp_cprc_base_type vfp_base_type;
8062 if (arm_vfp_abi_for_function (gdbarch, func_type)
8063 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8065 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8066 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8068 for (i = 0; i < vfp_base_count; i++)
8070 if (reg_char == 'q')
8073 arm_neon_quad_write (gdbarch, regcache, i,
8074 writebuf + i * unit_length);
8077 arm_neon_quad_read (gdbarch, regcache, i,
8078 readbuf + i * unit_length);
8085 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8086 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8089 regcache_cooked_write (regcache, regnum,
8090 writebuf + i * unit_length);
8092 regcache_cooked_read (regcache, regnum,
8093 readbuf + i * unit_length);
8096 return RETURN_VALUE_REGISTER_CONVENTION;
8099 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8100 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8101 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8103 if (tdep->struct_return == pcc_struct_return
8104 || arm_return_in_memory (gdbarch, valtype))
8105 return RETURN_VALUE_STRUCT_CONVENTION;
8107 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8109 if (arm_return_in_memory (gdbarch, valtype))
8110 return RETURN_VALUE_STRUCT_CONVENTION;
8114 arm_store_return_value (valtype, regcache, writebuf);
8117 arm_extract_return_value (valtype, regcache, readbuf);
8119 return RETURN_VALUE_REGISTER_CONVENTION;
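/* Illustrative example for the VFP coprocessor return path above,
   assuming the homogeneous-aggregate rules implemented by
   arm_vfp_call_candidate: a VFP-ABI function returning
   "struct { float x, y; }" yields a base type of float with
   vfp_base_count == 2, reg_char 's' and unit_length 4, so the members
   are read from or written to "s0" and "s1" via
   user_reg_map_name_to_regnum.  A pair of doubles would use "d0" and
   "d1", and quad base types go through arm_neon_quad_read and
   arm_neon_quad_write as shown.  */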
8124 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8126 struct gdbarch *gdbarch = get_frame_arch (frame);
8127 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8128 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8130 gdb_byte buf[INT_REGISTER_SIZE];
8132 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8134 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8138 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8142 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8143 return the target PC. Otherwise return 0. */
8146 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8150 CORE_ADDR start_addr;
8152 /* Find the starting address and name of the function containing the PC. */
8153 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8155 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8157 start_addr = arm_skip_bx_reg (frame, pc);
8158 if (start_addr != 0)
8164 /* If PC is in a Thumb call or return stub, return the address of the
8165 target PC, which is in a register. The thunk functions are called
8166 _call_via_xx, where xx is the register name. The possible names
8167 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8168 functions, named __ARM_call_via_r[0-7]. */
8169 if (startswith (name, "_call_via_")
8170 || startswith (name, "__ARM_call_via_"))
8172 /* Use the name suffix to determine which register contains the
8174 static char *table[15] =
8175 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8176 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8179 int offset = strlen (name) - 2;
8181 for (regno = 0; regno <= 14; regno++)
8182 if (strcmp (&name[offset], table[regno]) == 0)
8183 return get_frame_register_unsigned (frame, regno);
8186 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8187 non-interworking calls to foo. We could decode the stubs
8188 to find the target but it's easier to use the symbol table. */
8189 namelen = strlen (name);
8190 if (name[0] == '_' && name[1] == '_'
8191 && ((namelen > 2 + strlen ("_from_thumb")
8192 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8193 || (namelen > 2 + strlen ("_from_arm")
8194 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8197 int target_len = namelen - 2;
8198 struct bound_minimal_symbol minsym;
8199 struct objfile *objfile;
8200 struct obj_section *sec;
8202 if (name[namelen - 1] == 'b')
8203 target_len -= strlen ("_from_thumb");
8205 target_len -= strlen ("_from_arm");
8207 target_name = (char *) alloca (target_len + 1);
8208 memcpy (target_name, name + 2, target_len);
8209 target_name[target_len] = '\0';
8211 sec = find_pc_section (pc);
8212 objfile = (sec == NULL) ? NULL : sec->objfile;
8213 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8214 if (minsym.minsym != NULL)
8215 return BMSYMBOL_VALUE_ADDRESS (minsym);
8220 return 0; /* not a stub */
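/* Illustrative examples of the stub recognition above: if the PC is
   inside "_call_via_r3", the "r3" suffix selects entry 3 of the table
   and the target is whatever r3 currently holds; if the PC is inside
   "__foo_from_thumb", the leading "__" and trailing "_from_thumb" are
   stripped, the minimal symbol "foo" is looked up in the same objfile,
   and its address is returned.  */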
8224 set_arm_command (char *args, int from_tty)
8226 printf_unfiltered (_("\
8227 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8228 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8232 show_arm_command (char *args, int from_tty)
8234 cmd_show_list (showarmcmdlist, from_tty, "");
8238 arm_update_current_architecture (void)
8240 struct gdbarch_info info;
8242 /* If the current architecture is not ARM, we have nothing to do. */
8243 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8246 /* Update the architecture. */
8247 gdbarch_info_init (&info);
8249 if (!gdbarch_update_p (info))
8250 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8254 set_fp_model_sfunc (char *args, int from_tty,
8255 struct cmd_list_element *c)
8259 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8260 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8262 arm_fp_model = (enum arm_float_model) fp_model;
8266 if (fp_model == ARM_FLOAT_LAST)
8267 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8270 arm_update_current_architecture ();
8274 show_fp_model (struct ui_file *file, int from_tty,
8275 struct cmd_list_element *c, const char *value)
8277 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8279 if (arm_fp_model == ARM_FLOAT_AUTO
8280 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8281 fprintf_filtered (file, _("\
8282 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8283 fp_model_strings[tdep->fp_model]);
8285 fprintf_filtered (file, _("\
8286 The current ARM floating point model is \"%s\".\n"),
8287 fp_model_strings[arm_fp_model]);
8291 arm_set_abi (char *args, int from_tty,
8292 struct cmd_list_element *c)
8296 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8297 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8299 arm_abi_global = (enum arm_abi_kind) arm_abi;
8303 if (arm_abi == ARM_ABI_LAST)
8304 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8307 arm_update_current_architecture ();
8311 arm_show_abi (struct ui_file *file, int from_tty,
8312 struct cmd_list_element *c, const char *value)
8314 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8316 if (arm_abi_global == ARM_ABI_AUTO
8317 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8318 fprintf_filtered (file, _("\
8319 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8320 arm_abi_strings[tdep->arm_abi]);
8322 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8327 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8328 struct cmd_list_element *c, const char *value)
8330 fprintf_filtered (file,
8331 _("The current execution mode assumed "
8332 "(when symbols are unavailable) is \"%s\".\n"),
8333 arm_fallback_mode_string);
8337 arm_show_force_mode (struct ui_file *file, int from_tty,
8338 struct cmd_list_element *c, const char *value)
8340 fprintf_filtered (file,
8341 _("The current execution mode assumed "
8342 "(even when symbols are available) is \"%s\".\n"),
8343 arm_force_mode_string);
8346 /* If the user changes the register disassembly style used for info
8347 register and other commands, we also have to switch the style used
8348 in opcodes for disassembly output. This function is run by the "set
8349 arm disassembly" command, and does that. */
8352 set_disassembly_style_sfunc (char *args, int from_tty,
8353 struct cmd_list_element *c)
8355 set_disassembly_style ();
8358 /* Return the ARM register name corresponding to register I. */
8360 arm_register_name (struct gdbarch *gdbarch, int i)
8362 const int num_regs = gdbarch_num_regs (gdbarch);
8364 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8365 && i >= num_regs && i < num_regs + 32)
8367 static const char *const vfp_pseudo_names[] = {
8368 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8369 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8370 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8371 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8374 return vfp_pseudo_names[i - num_regs];
8377 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8378 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8380 static const char *const neon_pseudo_names[] = {
8381 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8382 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8385 return neon_pseudo_names[i - num_regs - 32];
8388 if (i >= ARRAY_SIZE (arm_register_names))
8389 /* These registers are only supported on targets which supply
8390 an XML description. */
8393 return arm_register_names[i];
8397 set_disassembly_style (void)
8401 /* Find the style that the user wants. */
8402 for (current = 0; current < num_disassembly_options; current++)
8403 if (disassembly_style == valid_disassembly_styles[current])
8405 gdb_assert (current < num_disassembly_options);
8407 /* Synchronize the disassembler. */
8408 set_arm_regname_option (current);
8411 /* Test whether the coff symbol specific value corresponds to a Thumb
8415 coff_sym_is_thumb (int val)
8417 return (val == C_THUMBEXT
8418 || val == C_THUMBSTAT
8419 || val == C_THUMBEXTFUNC
8420 || val == C_THUMBSTATFUNC
8421 || val == C_THUMBLABEL);
8424 /* arm_coff_make_msymbol_special()
8425 arm_elf_make_msymbol_special()
8427 These functions test whether the COFF or ELF symbol corresponds to
8428 an address in thumb code, and set a "special" bit in a minimal
8429 symbol to indicate that it does. */
8432 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8434 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
8435 == ST_BRANCH_TO_THUMB)
8436 MSYMBOL_SET_SPECIAL (msym);
8440 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8442 if (coff_sym_is_thumb (val))
8443 MSYMBOL_SET_SPECIAL (msym);
8447 arm_objfile_data_free (struct objfile *objfile, void *arg)
8449 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8452 for (i = 0; i < objfile->obfd->section_count; i++)
8453 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8457 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8460 const char *name = bfd_asymbol_name (sym);
8461 struct arm_per_objfile *data;
8462 VEC(arm_mapping_symbol_s) **map_p;
8463 struct arm_mapping_symbol new_map_sym;
8465 gdb_assert (name[0] == '$');
8466 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8469 data = (struct arm_per_objfile *) objfile_data (objfile,
8470 arm_objfile_data_key);
8473 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8474 struct arm_per_objfile);
8475 set_objfile_data (objfile, arm_objfile_data_key, data);
8476 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8477 objfile->obfd->section_count,
8478 VEC(arm_mapping_symbol_s) *);
8480 map_p = &data->section_maps[bfd_get_section (sym)->index];
8482 new_map_sym.value = sym->value;
8483 new_map_sym.type = name[1];
8485 /* Assume that most mapping symbols appear in order of increasing
8486 value. If they were randomly distributed, it would be faster to
8487 always push here and then sort at first use. */
8488 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8490 struct arm_mapping_symbol *prev_map_sym;
8492 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8493 if (prev_map_sym->value >= sym->value)
8496 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8497 arm_compare_mapping_symbols);
8498 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8503 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
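/* Background (hedged summary of the ARM ELF mapping-symbol
   convention): "$a" marks the start of ARM code, "$t" the start of
   Thumb code and "$d" the start of literal data, which is why only
   those three type characters are accepted above.  A later address
   lookup can then binary-search the per-section vector, e.g.

       idx = VEC_lower_bound (arm_mapping_symbol_s, map, &key,
                              arm_compare_mapping_symbols);

   and use the type of the preceding entry to decide how to
   disassemble or single-step that address; the lookup itself lives
   elsewhere in this file, so the snippet is only illustrative.  */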
8507 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8509 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8510 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8512 /* If necessary, set the T bit. */
8515 ULONGEST val, t_bit;
8516 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8517 t_bit = arm_psr_thumb_bit (gdbarch);
8518 if (arm_pc_is_thumb (gdbarch, pc))
8519 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8522 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8527 /* Read the contents of a NEON quad register, by reading from two
8528 double registers. This is used to implement the quad pseudo
8529 registers, and for argument passing in case the quad registers are
8530 missing; vectors are passed in quad registers when using the VFP
8531 ABI, even if a NEON unit is not present. REGNUM is the index of
8532 the quad register, in [0, 15]. */
8534 static enum register_status
8535 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8536 int regnum, gdb_byte *buf)
8539 gdb_byte reg_buf[8];
8540 int offset, double_regnum;
8541 enum register_status status;
8543 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8544 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8547 /* d0 is always the least significant half of q0. */
8548 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8553 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8554 if (status != REG_VALID)
8556 memcpy (buf + offset, reg_buf, 8);
8558 offset = 8 - offset;
8559 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8560 if (status != REG_VALID)
8562 memcpy (buf + offset, reg_buf, 8);
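/* Worked example (illustrative): quad pseudo register q1 is backed by
   the raw double registers d2 and d3.  With REGNUM == 1 the code
   above builds the name "d2", maps it to a raw register number, and
   fills the 16-byte buffer as

       little-endian:  buf[0..7] = d2, buf[8..15] = d3
       big-endian:     buf[0..7] = d3, buf[8..15] = d2

   so that d2 (the even-numbered register) is always the least
   significant half of q1, regardless of byte order.  */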
8567 static enum register_status
8568 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8569 int regnum, gdb_byte *buf)
8571 const int num_regs = gdbarch_num_regs (gdbarch);
8573 gdb_byte reg_buf[8];
8574 int offset, double_regnum;
8576 gdb_assert (regnum >= num_regs);
8579 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8580 /* Quad-precision register. */
8581 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8584 enum register_status status;
8586 /* Single-precision register. */
8587 gdb_assert (regnum < 32);
8589 /* s0 is always the least significant half of d0. */
8590 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8591 offset = (regnum & 1) ? 0 : 4;
8593 offset = (regnum & 1) ? 4 : 0;
8595 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8596 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8599 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8600 if (status == REG_VALID)
8601 memcpy (buf, reg_buf + offset, 4);
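/* Worked example (illustrative): the single-precision pseudo register
   s7 lives in the raw double register d3 (7 >> 1 == 3).  On a
   little-endian target the odd-numbered s register is the upper half,
   so the code above copies from reg_buf + 4; s6 would come from
   reg_buf + 0.  On a big-endian target the two offsets are swapped.  */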
8606 /* Store the contents of BUF to a NEON quad register, by writing to
8607 two double registers. This is used to implement the quad pseudo
8608 registers, and for argument passing in case the quad registers are
8609 missing; vectors are passed in quad registers when using the VFP
8610 ABI, even if a NEON unit is not present. REGNUM is the index
8611 of the quad register, in [0, 15]. */
8614 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8615 int regnum, const gdb_byte *buf)
8618 int offset, double_regnum;
8620 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8621 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8624 /* d0 is always the least significant half of q0. */
8625 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8630 regcache_raw_write (regcache, double_regnum, buf + offset);
8631 offset = 8 - offset;
8632 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8636 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8637 int regnum, const gdb_byte *buf)
8639 const int num_regs = gdbarch_num_regs (gdbarch);
8641 gdb_byte reg_buf[8];
8642 int offset, double_regnum;
8644 gdb_assert (regnum >= num_regs);
8647 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8648 /* Quad-precision register. */
8649 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8652 /* Single-precision register. */
8653 gdb_assert (regnum < 32);
8655 /* s0 is always the least significant half of d0. */
8656 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8657 offset = (regnum & 1) ? 0 : 4;
8659 offset = (regnum & 1) ? 4 : 0;
8661 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8662 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8665 regcache_raw_read (regcache, double_regnum, reg_buf);
8666 memcpy (reg_buf + offset, buf, 4);
8667 regcache_raw_write (regcache, double_regnum, reg_buf);
8671 static struct value *
8672 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8674 const int *reg_p = (const int *) baton;
8675 return value_of_register (*reg_p, frame);
8678 static enum gdb_osabi
8679 arm_elf_osabi_sniffer (bfd *abfd)
8681 unsigned int elfosabi;
8682 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8684 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8686 if (elfosabi == ELFOSABI_ARM)
8687 /* GNU tools use this value. Check note sections in this case, as well. */
8689 bfd_map_over_sections (abfd,
8690 generic_elf_osabi_sniff_abi_tag_sections,
8693 /* Anything else will be handled by the generic ELF sniffer. */
8698 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8699 struct reggroup *group)
8701 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8702 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8703 all_reggroup, of course. */
8704 if (regnum == ARM_FPS_REGNUM)
8705 return (group == float_reggroup
8706 || group == save_reggroup
8707 || group == restore_reggroup
8708 || group == all_reggroup);
8710 return default_register_reggroup_p (gdbarch, regnum, group);
8714 /* For backward-compatibility we allow two 'g' packet lengths with
8715 the remote protocol depending on whether FPA registers are
8716 supplied. M-profile targets do not have FPA registers, but some
8717 stubs already exist in the wild which use a 'g' packet which
8718 supplies them albeit with dummy values. The packet format which
8719 includes FPA registers should be considered deprecated for
8720 M-profile targets. */
8723 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8725 if (gdbarch_tdep (gdbarch)->is_m)
8727 /* If we know from the executable this is an M-profile target,
8728 cater for remote targets whose register set layout is the
8729 same as the FPA layout. */
8730 register_remote_g_packet_guess (gdbarch,
8731 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8732 (16 * INT_REGISTER_SIZE)
8733 + (8 * FP_REGISTER_SIZE)
8734 + (2 * INT_REGISTER_SIZE),
8735 tdesc_arm_with_m_fpa_layout);
8737 /* The regular M-profile layout. */
8738 register_remote_g_packet_guess (gdbarch,
8739 /* r0-r12,sp,lr,pc; xpsr */
8740 (16 * INT_REGISTER_SIZE)
8741 + INT_REGISTER_SIZE,
8744 /* M-profile plus M4F VFP. */
8745 register_remote_g_packet_guess (gdbarch,
8746 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8747 (16 * INT_REGISTER_SIZE)
8748 + (16 * VFP_REGISTER_SIZE)
8749 + (2 * INT_REGISTER_SIZE),
8750 tdesc_arm_with_m_vfp_d16);
8753 /* Otherwise we don't have a useful guess. */
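/* Size check (illustrative, assuming the usual arm-tdep.h values
   INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12 and
   VFP_REGISTER_SIZE == 8): the three guesses registered above expect
   'g' packets carrying

       16*4 + 8*12 + 2*4 = 168 bytes   (M-profile, FPA-style layout)
       16*4 + 4          =  68 bytes   (plain M-profile)
       16*4 + 16*8 + 2*4 = 200 bytes   (M-profile with VFP d16)

   of register data (twice that many hex characters on the wire),
   which is how the remote layer picks a target description when the
   stub does not supply one.  */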
8757 /* Initialize the current architecture based on INFO. If possible,
8758 re-use an architecture from ARCHES, which is a list of
8759 architectures already created during this debugging session.
8761 Called e.g. at program startup, when reading a core file, and when
8762 reading a binary file. */
8764 static struct gdbarch *
8765 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8767 struct gdbarch_tdep *tdep;
8768 struct gdbarch *gdbarch;
8769 struct gdbarch_list *best_arch;
8770 enum arm_abi_kind arm_abi = arm_abi_global;
8771 enum arm_float_model fp_model = arm_fp_model;
8772 struct tdesc_arch_data *tdesc_data = NULL;
8774 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8775 int have_wmmx_registers = 0;
8777 int have_fpa_registers = 1;
8778 const struct target_desc *tdesc = info.target_desc;
8780 /* If we have an object to base this architecture on, try to determine
8783 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8785 int ei_osabi, e_flags;
8787 switch (bfd_get_flavour (info.abfd))
8789 case bfd_target_aout_flavour:
8790 /* Assume it's an old APCS-style ABI. */
8791 arm_abi = ARM_ABI_APCS;
8794 case bfd_target_coff_flavour:
8795 /* Assume it's an old APCS-style ABI. */
8797 arm_abi = ARM_ABI_APCS;
8800 case bfd_target_elf_flavour:
8801 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8802 e_flags = elf_elfheader (info.abfd)->e_flags;
8804 if (ei_osabi == ELFOSABI_ARM)
8806 /* GNU tools used to use this value, but do not for EABI
8807 objects. There's nowhere to tag an EABI version
8808 anyway, so assume APCS. */
8809 arm_abi = ARM_ABI_APCS;
8811 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8813 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8814 int attr_arch, attr_profile;
8818 case EF_ARM_EABI_UNKNOWN:
8819 /* Assume GNU tools. */
8820 arm_abi = ARM_ABI_APCS;
8823 case EF_ARM_EABI_VER4:
8824 case EF_ARM_EABI_VER5:
8825 arm_abi = ARM_ABI_AAPCS;
8826 /* EABI binaries default to VFP float ordering.
8827 They may also contain build attributes that can
8828 be used to identify if the VFP argument-passing
8830 if (fp_model == ARM_FLOAT_AUTO)
8833 switch (bfd_elf_get_obj_attr_int (info.abfd,
8837 case AEABI_VFP_args_base:
8838 /* "The user intended FP parameter/result
8839 passing to conform to AAPCS, base
8841 fp_model = ARM_FLOAT_SOFT_VFP;
8843 case AEABI_VFP_args_vfp:
8844 /* "The user intended FP parameter/result
8845 passing to conform to AAPCS, VFP
8847 fp_model = ARM_FLOAT_VFP;
8849 case AEABI_VFP_args_toolchain:
8850 /* "The user intended FP parameter/result
8851 passing to conform to tool chain-specific
8852 conventions" - we don't know any such
8853 conventions, so leave it as "auto". */
8855 case AEABI_VFP_args_compatible:
8856 /* "Code is compatible with both the base
8857 and VFP variants; the user did not permit
8858 non-variadic functions to pass FP
8859 parameters/results" - leave it as
8863 /* Attribute value not mentioned in the
8864 November 2012 ABI, so leave it as
8869 fp_model = ARM_FLOAT_SOFT_VFP;
8875 /* Leave it as "auto". */
8876 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8881 /* Detect M-profile programs. This only works if the
8882 executable file includes build attributes; GCC does
8883 copy them to the executable, but e.g. RealView does not. */
8885 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8887 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8889 Tag_CPU_arch_profile);
8890 /* GCC specifies the profile for v6-M; RealView only
8891 specifies the profile for architectures starting with
8892 V7 (as opposed to architectures with a tag
8893 numerically greater than TAG_CPU_ARCH_V7). */
8894 if (!tdesc_has_registers (tdesc)
8895 && (attr_arch == TAG_CPU_ARCH_V6_M
8896 || attr_arch == TAG_CPU_ARCH_V6S_M
8897 || attr_profile == 'M'))
8902 if (fp_model == ARM_FLOAT_AUTO)
8904 int e_flags = elf_elfheader (info.abfd)->e_flags;
8906 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8909 /* Leave it as "auto". Strictly speaking this case
8910 means FPA, but almost nobody uses that now, and
8911 many toolchains fail to set the appropriate bits
8912 for the floating-point model they use. */
8914 case EF_ARM_SOFT_FLOAT:
8915 fp_model = ARM_FLOAT_SOFT_FPA;
8917 case EF_ARM_VFP_FLOAT:
8918 fp_model = ARM_FLOAT_VFP;
8920 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8921 fp_model = ARM_FLOAT_SOFT_VFP;
8926 if (e_flags & EF_ARM_BE8)
8927 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8932 /* Leave it as "auto". */
8937 /* Check any target description for validity. */
8938 if (tdesc_has_registers (tdesc))
8940 /* For most registers we require GDB's default names; but also allow
8941 the numeric names for sp / lr / pc, as a convenience. */
8942 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8943 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8944 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8946 const struct tdesc_feature *feature;
8949 feature = tdesc_find_feature (tdesc,
8950 "org.gnu.gdb.arm.core");
8951 if (feature == NULL)
8953 feature = tdesc_find_feature (tdesc,
8954 "org.gnu.gdb.arm.m-profile");
8955 if (feature == NULL)
8961 tdesc_data = tdesc_data_alloc ();
8964 for (i = 0; i < ARM_SP_REGNUM; i++)
8965 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8966 arm_register_names[i]);
8967 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8970 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8973 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8977 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8978 ARM_PS_REGNUM, "xpsr");
8980 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8981 ARM_PS_REGNUM, "cpsr");
8985 tdesc_data_cleanup (tdesc_data);
8989 feature = tdesc_find_feature (tdesc,
8990 "org.gnu.gdb.arm.fpa");
8991 if (feature != NULL)
8994 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
8995 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8996 arm_register_names[i]);
8999 tdesc_data_cleanup (tdesc_data);
9004 have_fpa_registers = 0;
9006 feature = tdesc_find_feature (tdesc,
9007 "org.gnu.gdb.xscale.iwmmxt");
9008 if (feature != NULL)
9010 static const char *const iwmmxt_names[] = {
9011 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9012 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9013 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9014 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9018 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9020 &= tdesc_numbered_register (feature, tdesc_data, i,
9021 iwmmxt_names[i - ARM_WR0_REGNUM]);
9023 /* Check for the control registers, but do not fail if they
9025 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9026 tdesc_numbered_register (feature, tdesc_data, i,
9027 iwmmxt_names[i - ARM_WR0_REGNUM]);
9029 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9031 &= tdesc_numbered_register (feature, tdesc_data, i,
9032 iwmmxt_names[i - ARM_WR0_REGNUM]);
9036 tdesc_data_cleanup (tdesc_data);
9040 have_wmmx_registers = 1;
9043 /* If we have a VFP unit, check whether the single precision registers
9044 are present. If not, then we will synthesize them as pseudo
9046 feature = tdesc_find_feature (tdesc,
9047 "org.gnu.gdb.arm.vfp");
9048 if (feature != NULL)
9050 static const char *const vfp_double_names[] = {
9051 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9052 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9053 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9054 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9057 /* Require the double precision registers. There must be either
9060 for (i = 0; i < 32; i++)
9062 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9064 vfp_double_names[i]);
9068 if (!valid_p && i == 16)
9071 /* Also require FPSCR. */
9072 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9073 ARM_FPSCR_REGNUM, "fpscr");
9076 tdesc_data_cleanup (tdesc_data);
9080 if (tdesc_unnumbered_register (feature, "s0") == 0)
9081 have_vfp_pseudos = 1;
9083 vfp_register_count = i;
9085 /* If we have VFP, also check for NEON. The architecture allows
9086 NEON without VFP (integer vector operations only), but GDB
9087 does not support that. */
9088 feature = tdesc_find_feature (tdesc,
9089 "org.gnu.gdb.arm.neon");
9090 if (feature != NULL)
9092 /* NEON requires 32 double-precision registers. */
9095 tdesc_data_cleanup (tdesc_data);
9099 /* If there are quad registers defined by the stub, use
9100 their type; otherwise (normally) provide them with
9101 the default type. */
9102 if (tdesc_unnumbered_register (feature, "q0") == 0)
9103 have_neon_pseudos = 1;
9110 /* If there is already a candidate, use it. */
9111 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9113 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9115 if (arm_abi != ARM_ABI_AUTO
9116 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9119 if (fp_model != ARM_FLOAT_AUTO
9120 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9123 /* There are various other properties in tdep that we do not
9124 need to check here: those derived from a target description,
9125 since gdbarches with a different target description are
9126 automatically disqualified. */
9128 /* Do check is_m, though, since it might come from the binary. */
9129 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9132 /* Found a match. */
9136 if (best_arch != NULL)
9138 if (tdesc_data != NULL)
9139 tdesc_data_cleanup (tdesc_data);
9140 return best_arch->gdbarch;
9143 tdep = XCNEW (struct gdbarch_tdep);
9144 gdbarch = gdbarch_alloc (&info, tdep);
9146 /* Record additional information about the architecture we are defining.
9147 These are gdbarch discriminators, like the OSABI. */
9148 tdep->arm_abi = arm_abi;
9149 tdep->fp_model = fp_model;
9151 tdep->have_fpa_registers = have_fpa_registers;
9152 tdep->have_wmmx_registers = have_wmmx_registers;
9153 gdb_assert (vfp_register_count == 0
9154 || vfp_register_count == 16
9155 || vfp_register_count == 32);
9156 tdep->vfp_register_count = vfp_register_count;
9157 tdep->have_vfp_pseudos = have_vfp_pseudos;
9158 tdep->have_neon_pseudos = have_neon_pseudos;
9159 tdep->have_neon = have_neon;
9161 arm_register_g_packet_guesses (gdbarch);
9164 switch (info.byte_order_for_code)
9166 case BFD_ENDIAN_BIG:
9167 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9168 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9169 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9170 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9174 case BFD_ENDIAN_LITTLE:
9175 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9176 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9177 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9178 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9183 internal_error (__FILE__, __LINE__,
9184 _("arm_gdbarch_init: bad byte order for float format"));
9187 /* On ARM targets char defaults to unsigned. */
9188 set_gdbarch_char_signed (gdbarch, 0);
9190 /* Note: for displaced stepping, this includes the breakpoint, and one word
9191 of additional scratch space. This setting isn't used for anything besides
9192 displaced stepping at present. */
9193 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9195 /* This should be low enough for everything. */
9196 tdep->lowest_pc = 0x20;
9197 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9199 /* The default, for both APCS and AAPCS, is to return small
9200 structures in registers. */
9201 tdep->struct_return = reg_struct_return;
9203 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9204 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9206 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9208 /* Frame handling. */
9209 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9210 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9211 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9213 frame_base_set_default (gdbarch, &arm_normal_base);
9215 /* Address manipulation. */
9216 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9218 /* Advance PC across function entry code. */
9219 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9221 /* Detect whether PC is at a point where the stack has been destroyed. */
9222 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9224 /* Skip trampolines. */
9225 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9227 /* The stack grows downward. */
9228 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9230 /* Breakpoint manipulation. */
9231 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9232 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9233 arm_remote_breakpoint_from_pc);
9235 /* Information about registers, etc. */
9236 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9237 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9238 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9239 set_gdbarch_register_type (gdbarch, arm_register_type);
9240 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9242 /* This "info float" is FPA-specific. Use the generic version if we
9244 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9245 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9247 /* Internal <-> external register number maps. */
9248 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9249 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9251 set_gdbarch_register_name (gdbarch, arm_register_name);
9253 /* Returning results. */
9254 set_gdbarch_return_value (gdbarch, arm_return_value);
9257 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9259 /* Minsymbol frobbing. */
9260 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9261 set_gdbarch_coff_make_msymbol_special (gdbarch,
9262 arm_coff_make_msymbol_special);
9263 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9265 /* Thumb-2 IT block support. */
9266 set_gdbarch_adjust_breakpoint_address (gdbarch,
9267 arm_adjust_breakpoint_address);
9269 /* Virtual tables. */
9270 set_gdbarch_vbit_in_delta (gdbarch, 1);
9272 /* Hook in the ABI-specific overrides, if they have been registered. */
9273 gdbarch_init_osabi (info, gdbarch);
9275 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9277 /* Add some default predicates. */
9279 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9280 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9281 dwarf2_append_unwinders (gdbarch);
9282 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9283 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9285 /* Now we have tuned the configuration, set a few final things,
9286 based on what the OS ABI has told us. */
9288 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9289 binaries are always marked. */
9290 if (tdep->arm_abi == ARM_ABI_AUTO)
9291 tdep->arm_abi = ARM_ABI_APCS;
9293 /* Watchpoints are not steppable. */
9294 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9296 /* We used to default to FPA for generic ARM, but almost nobody
9297 uses that now, and we now provide a way for the user to force
9298 the model. So default to the most useful variant. */
9299 if (tdep->fp_model == ARM_FLOAT_AUTO)
9300 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9302 if (tdep->jb_pc >= 0)
9303 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9305 /* Floating point sizes and format. */
9306 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9307 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9309 set_gdbarch_double_format
9310 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9311 set_gdbarch_long_double_format
9312 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9316 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9317 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9320 if (have_vfp_pseudos)
9322 /* NOTE: These are the only pseudo registers used by
9323 the ARM target at the moment. If more are added, a
9324 little more care in numbering will be needed. */
9326 int num_pseudos = 32;
9327 if (have_neon_pseudos)
9329 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9330 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9331 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9336 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9338 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9340 /* Override tdesc_register_type to adjust the types of VFP
9341 registers for NEON. */
9342 set_gdbarch_register_type (gdbarch, arm_register_type);
9345 /* Add standard register aliases. We add aliases even for those
9346 names which are used by the current architecture - it's simpler,
9347 and does no harm, since nothing ever lists user registers. */
9348 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9349 user_reg_add (gdbarch, arm_register_aliases[i].name,
9350 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9356 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9358 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9363 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9364 (unsigned long) tdep->lowest_pc);
9367 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9370 _initialize_arm_tdep (void)
9372 struct ui_file *stb;
9374 const char *setname;
9375 const char *setdesc;
9376 const char *const *regnames;
9378 static char *helptext;
9379 char regdesc[1024], *rdptr = regdesc;
9380 size_t rest = sizeof (regdesc);
9382 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9384 arm_objfile_data_key
9385 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9387 /* Add ourselves to objfile event chain. */
9388 observer_attach_new_objfile (arm_exidx_new_objfile);
9390 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9392 /* Register an ELF OS ABI sniffer for ARM binaries. */
9393 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9394 bfd_target_elf_flavour,
9395 arm_elf_osabi_sniffer);
9397 /* Initialize the standard target descriptions. */
9398 initialize_tdesc_arm_with_m ();
9399 initialize_tdesc_arm_with_m_fpa_layout ();
9400 initialize_tdesc_arm_with_m_vfp_d16 ();
9401 initialize_tdesc_arm_with_iwmmxt ();
9402 initialize_tdesc_arm_with_vfpv2 ();
9403 initialize_tdesc_arm_with_vfpv3 ();
9404 initialize_tdesc_arm_with_neon ();
9406 /* Get the number of possible sets of register names defined in opcodes. */
9407 num_disassembly_options = get_arm_regname_num_options ();
9409 /* Add root prefix command for all "set arm"/"show arm" commands. */
9410 add_prefix_cmd ("arm", no_class, set_arm_command,
9411 _("Various ARM-specific commands."),
9412 &setarmcmdlist, "set arm ", 0, &setlist);
9414 add_prefix_cmd ("arm", no_class, show_arm_command,
9415 _("Various ARM-specific commands."),
9416 &showarmcmdlist, "show arm ", 0, &showlist);
9418 /* Sync the opcode insn printer with our register viewer. */
9419 parse_arm_disassembler_option ("reg-names-std");
9421 /* Initialize the array that will be passed to
9422 add_setshow_enum_cmd(). */
9423 valid_disassembly_styles = XNEWVEC (const char *,
9424 num_disassembly_options + 1);
9425 for (i = 0; i < num_disassembly_options; i++)
9427 get_arm_regnames (i, &setname, &setdesc, ®names);
9428 valid_disassembly_styles[i] = setname;
9429 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9432 /* When we find the default names, tell the disassembler to use
9434 if (!strcmp (setname, "std"))
9436 disassembly_style = setname;
9437 set_arm_regname_option (i);
9440 /* Mark the end of valid options. */
9441 valid_disassembly_styles[num_disassembly_options] = NULL;
9443 /* Create the help text. */
9444 stb = mem_fileopen ();
9445 fprintf_unfiltered (stb, "%s%s%s",
9446 _("The valid values are:\n"),
9448 _("The default is \"std\"."));
9449 helptext = ui_file_xstrdup (stb, NULL);
9450 ui_file_delete (stb);
9452 add_setshow_enum_cmd("disassembler", no_class,
9453 valid_disassembly_styles, &disassembly_style,
9454 _("Set the disassembly style."),
9455 _("Show the disassembly style."),
9457 set_disassembly_style_sfunc,
9458 NULL, /* FIXME: i18n: The disassembly style is
9460 &setarmcmdlist, &showarmcmdlist);
9462 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9463 _("Set usage of ARM 32-bit mode."),
9464 _("Show usage of ARM 32-bit mode."),
9465 _("When off, a 26-bit PC will be used."),
9467 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9469 &setarmcmdlist, &showarmcmdlist);
9471 /* Add a command to allow the user to force the FPU model. */
9472 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9473 _("Set the floating point type."),
9474 _("Show the floating point type."),
9475 _("auto - Determine the FP typefrom the OS-ABI.\n\
9476 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9477 fpa - FPA co-processor (GCC compiled).\n\
9478 softvfp - Software FP with pure-endian doubles.\n\
9479 vfp - VFP co-processor."),
9480 set_fp_model_sfunc, show_fp_model,
9481 &setarmcmdlist, &showarmcmdlist);
9483 /* Add a command to allow the user to force the ABI. */
9484 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9487 NULL, arm_set_abi, arm_show_abi,
9488 &setarmcmdlist, &showarmcmdlist);
9490 /* Add two commands to allow the user to force the assumed
9492 add_setshow_enum_cmd ("fallback-mode", class_support,
9493 arm_mode_strings, &arm_fallback_mode_string,
9494 _("Set the mode assumed when symbols are unavailable."),
9495 _("Show the mode assumed when symbols are unavailable."),
9496 NULL, NULL, arm_show_fallback_mode,
9497 &setarmcmdlist, &showarmcmdlist);
9498 add_setshow_enum_cmd ("force-mode", class_support,
9499 arm_mode_strings, &arm_force_mode_string,
9500 _("Set the mode assumed even when symbols are available."),
9501 _("Show the mode assumed even when symbols are available."),
9502 NULL, NULL, arm_show_force_mode,
9503 &setarmcmdlist, &showarmcmdlist);
9505 /* Debugging flag. */
9506 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9507 _("Set ARM debugging."),
9508 _("Show ARM debugging."),
9509 _("When on, arm-specific debugging is enabled."),
9511 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9512 &setdebuglist, &showdebuglist);
9515 /* ARM-reversible process record data structures. */
9517 #define ARM_INSN_SIZE_BYTES 4
9518 #define THUMB_INSN_SIZE_BYTES 2
9519 #define THUMB2_INSN_SIZE_BYTES 4
9522 /* Position of the bit within a 32-bit ARM instruction
9523 that defines whether the instruction is a load or store. */
9524 #define INSN_S_L_BIT_NUM 20
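/* Example: the decode routines below test this bit with

       bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)

   which is non-zero for loads (L == 1) and zero for stores (L == 0);
   see e.g. arm_record_ld_st_imm_offset.  */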
9526 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9529 unsigned int reg_len = LENGTH; \
9532 REGS = XNEWVEC (uint32_t, reg_len); \
9533 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9538 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9541 unsigned int mem_len = LENGTH; \
9544 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9545 memcpy(&MEMS->len, &RECORD_BUF[0], \
9546 sizeof(struct arm_mem_r) * LENGTH); \
9551 /* Checks whether the insn has already been recorded, i.e. is no longer waiting to be decoded (boolean expression). */
9552 #define INSN_RECORDED(ARM_RECORD) \
9553 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
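/* Typical usage (sketch of the pattern used by the decode routines
   below): a routine fills local record_buf[] / record_buf_mem[]
   arrays and the two counters, then copies them into the
   heap-allocated lists attached to the record:

       uint32_t record_buf[8], record_buf_mem[8];
       record_buf[0] = ARM_PS_REGNUM;
       arm_insn_r->reg_rec_count = 1;
       REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                  record_buf);
       MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                  record_buf_mem);

   After that, INSN_RECORDED (arm_insn_r) is true, so later stages
   know the insn has already been handled.  */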
9555 /* ARM memory record structure. */
9558 uint32_t len; /* Record length. */
9559 uint32_t addr; /* Memory address. */
9562 /* An ARM instruction record holds the opcode and execution state
9563 of the current insn (before entry to decode_insn ()), and the
9564 lists of to-be-modified registers and
9565 memory blocks (on return from decode_insn ()). */
9567 typedef struct insn_decode_record_t
9569 struct gdbarch *gdbarch;
9570 struct regcache *regcache;
9571 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9572 uint32_t arm_insn; /* Should accommodate thumb. */
9573 uint32_t cond; /* Condition code. */
9574 uint32_t opcode; /* Insn opcode. */
9575 uint32_t decode; /* Insn decode bits. */
9576 uint32_t mem_rec_count; /* No of mem records. */
9577 uint32_t reg_rec_count; /* No of reg records. */
9578 uint32_t *arm_regs; /* Registers to be saved for this record. */
9579 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9580 } insn_decode_record;
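/* Sketch of how a record is filled (illustrative): the caller
   allocates one insn_decode_record per instruction, stores gdbarch,
   regcache, this_addr and the fetched instruction, and the decode
   helpers below then derive the opcode fields with bits (), e.g.

       arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
       arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

   before filling arm_regs / arm_mems via REG_ALLOC / MEM_ALLOC.  The
   two arrays are freed by the caller once the modified registers and
   memory have been reported to the record target.  */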
9583 /* Checks ARM SBZ and SBO mandatory fields. */
9586 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9588 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
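/* Intent (hedged restatement of the check that follows): for a field
   of LEN bits starting at 1-based bit position BIT_NUM, sbo_sbz
   returns true when the field is all ones (SBO == 1, "should be
   one") or all zeros (SBO == 0, "should be zero"), e.g.

       sbo_sbz (insn, 13, 4, 1)   checks that bits 12-15 are all 1
       sbo_sbz (insn, 1, 12, 0)   checks that bits 0-11 are all 0

   as used by the MSR/MRS decoding below.  */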
9607 enum arm_record_result
9609 ARM_RECORD_SUCCESS = 0,
9610 ARM_RECORD_FAILURE = 1
9617 } arm_record_strx_t;
9628 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9629 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9632 struct regcache *reg_cache = arm_insn_r->regcache;
9633 ULONGEST u_regval[2]= {0};
9635 uint32_t reg_src1 = 0, reg_src2 = 0;
9636 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9638 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9639 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9641 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9643 /* 1) Handle misc store, immediate offset. */
9644 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9645 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9646 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9647 regcache_raw_read_unsigned (reg_cache, reg_src1,
9649 if (ARM_PC_REGNUM == reg_src1)
9651 /* If R15 was used as Rn, the effective base is the current PC + 8. */
9652 u_regval[0] = u_regval[0] + 8;
9654 offset_8 = (immed_high << 4) | immed_low;
9655 /* Calculate target store address. */
9656 if (14 == arm_insn_r->opcode)
9658 tgt_mem_addr = u_regval[0] + offset_8;
9662 tgt_mem_addr = u_regval[0] - offset_8;
9664 if (ARM_RECORD_STRH == str_type)
9666 record_buf_mem[0] = 2;
9667 record_buf_mem[1] = tgt_mem_addr;
9668 arm_insn_r->mem_rec_count = 1;
9670 else if (ARM_RECORD_STRD == str_type)
9672 record_buf_mem[0] = 4;
9673 record_buf_mem[1] = tgt_mem_addr;
9674 record_buf_mem[2] = 4;
9675 record_buf_mem[3] = tgt_mem_addr + 4;
9676 arm_insn_r->mem_rec_count = 2;
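/* Worked example (illustrative; the registers and offset are made up):
   for "STRD r2, r3, [r4, #8]" the two immediate halves give
   offset_8 == 8, and with the add (U == 1) form the target address is
   r4 + 8.  The record then describes two 4-byte stores, at
   tgt_mem_addr and tgt_mem_addr + 4, matching the two words written
   by STRD.  */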
9679 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9681 /* 2) Store, register offset. */
9683 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9685 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9686 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9687 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9690 /* If R15 was used as Rn, the effective base is the current PC + 8. */
9691 u_regval[0] = u_regval[0] + 8;
9693 /* Calculate target store address, Rn +/- Rm, register offset. */
9694 if (12 == arm_insn_r->opcode)
9696 tgt_mem_addr = u_regval[0] + u_regval[1];
9700 tgt_mem_addr = u_regval[1] - u_regval[0];
9702 if (ARM_RECORD_STRH == str_type)
9704 record_buf_mem[0] = 2;
9705 record_buf_mem[1] = tgt_mem_addr;
9706 arm_insn_r->mem_rec_count = 1;
9708 else if (ARM_RECORD_STRD == str_type)
9710 record_buf_mem[0] = 4;
9711 record_buf_mem[1] = tgt_mem_addr;
9712 record_buf_mem[2] = 4;
9713 record_buf_mem[3] = tgt_mem_addr + 4;
9714 arm_insn_r->mem_rec_count = 2;
9717 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9718 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9720 /* 3) Store, immediate pre-indexed. */
9721 /* 5) Store, immediate post-indexed. */
9722 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9723 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9724 offset_8 = (immed_high << 4) | immed_low;
9725 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9726 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9727 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9728 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9730 tgt_mem_addr = u_regval[0] + offset_8;
9734 tgt_mem_addr = u_regval[0] - offset_8;
9736 if (ARM_RECORD_STRH == str_type)
9738 record_buf_mem[0] = 2;
9739 record_buf_mem[1] = tgt_mem_addr;
9740 arm_insn_r->mem_rec_count = 1;
9742 else if (ARM_RECORD_STRD == str_type)
9744 record_buf_mem[0] = 4;
9745 record_buf_mem[1] = tgt_mem_addr;
9746 record_buf_mem[2] = 4;
9747 record_buf_mem[3] = tgt_mem_addr + 4;
9748 arm_insn_r->mem_rec_count = 2;
9750 /* Record Rn also as it changes. */
9751 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9752 arm_insn_r->reg_rec_count = 1;
9754 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9755 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9757 /* 4) Store, register pre-indexed. */
9758 /* 6) Store, register post-indexed. */
9759 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9760 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9761 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9762 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9763 /* Calculate target store address, Rn +/- Rm, register offset. */
9764 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9766 tgt_mem_addr = u_regval[0] + u_regval[1];
9770 tgt_mem_addr = u_regval[1] - u_regval[0];
9772 if (ARM_RECORD_STRH == str_type)
9774 record_buf_mem[0] = 2;
9775 record_buf_mem[1] = tgt_mem_addr;
9776 arm_insn_r->mem_rec_count = 1;
9778 else if (ARM_RECORD_STRD == str_type)
9780 record_buf_mem[0] = 4;
9781 record_buf_mem[1] = tgt_mem_addr;
9782 record_buf_mem[2] = 4;
9783 record_buf_mem[3] = tgt_mem_addr + 4;
9784 arm_insn_r->mem_rec_count = 2;
9786 /* Record Rn also as it changes. */
9787 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9788 arm_insn_r->reg_rec_count = 1;
9793 /* Handling ARM extension space insns. */
9796 arm_record_extension_space (insn_decode_record *arm_insn_r)
9798 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9799 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9800 uint32_t record_buf[8], record_buf_mem[8];
9801 uint32_t reg_src1 = 0;
9802 struct regcache *reg_cache = arm_insn_r->regcache;
9803 ULONGEST u_regval = 0;
9805 gdb_assert (!INSN_RECORDED(arm_insn_r));
9806 /* Handle unconditional insn extension space. */
9808 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9809 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9810 if (arm_insn_r->cond)
9812 /* PLD has no effect on architectural state, it just affects
9814 if (5 == ((opcode1 & 0xE0) >> 5))
9817 record_buf[0] = ARM_PS_REGNUM;
9818 record_buf[1] = ARM_LR_REGNUM;
9819 arm_insn_r->reg_rec_count = 2;
9821 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9825 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9826 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9829 /* Undefined instruction on ARM V5; need to handle if later
9830 versions define it. */
9833 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9834 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9835 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9837 /* Handle arithmetic insn extension space. */
9838 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9839 && !INSN_RECORDED(arm_insn_r))
9841 /* Handle MLA(S) and MUL(S). */
9842 if (0 <= insn_op1 && 3 >= insn_op1)
9844 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9845 record_buf[1] = ARM_PS_REGNUM;
9846 arm_insn_r->reg_rec_count = 2;
9848 else if (4 <= insn_op1 && 15 >= insn_op1)
9850 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9851 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9852 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9853 record_buf[2] = ARM_PS_REGNUM;
9854 arm_insn_r->reg_rec_count = 3;
9858 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9859 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9860 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9862 /* Handle control insn extension space. */
9864 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9865 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9867 if (!bit (arm_insn_r->arm_insn,25))
9869 if (!bits (arm_insn_r->arm_insn, 4, 7))
9871 if ((0 == insn_op1) || (2 == insn_op1))
9874 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9875 arm_insn_r->reg_rec_count = 1;
9877 else if (1 == insn_op1)
9879 /* CPSR is going to be changed. */
9880 record_buf[0] = ARM_PS_REGNUM;
9881 arm_insn_r->reg_rec_count = 1;
9883 else if (3 == insn_op1)
9885 /* SPSR is going to be changed. */
9886 /* We need to get SPSR value, which is yet to be done. */
9887 printf_unfiltered (_("Process record does not support "
9888 "instruction 0x%0x at address %s.\n"),
9889 arm_insn_r->arm_insn,
9890 paddress (arm_insn_r->gdbarch,
9891 arm_insn_r->this_addr));
9895 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9900 record_buf[0] = ARM_PS_REGNUM;
9901 arm_insn_r->reg_rec_count = 1;
9903 else if (3 == insn_op1)
9906 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9907 arm_insn_r->reg_rec_count = 1;
9910 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9913 record_buf[0] = ARM_PS_REGNUM;
9914 record_buf[1] = ARM_LR_REGNUM;
9915 arm_insn_r->reg_rec_count = 2;
9917 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9919 /* QADD, QSUB, QDADD, QDSUB */
9920 record_buf[0] = ARM_PS_REGNUM;
9921 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9922 arm_insn_r->reg_rec_count = 2;
9924 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
9927 record_buf[0] = ARM_PS_REGNUM;
9928 record_buf[1] = ARM_LR_REGNUM;
9929 arm_insn_r->reg_rec_count = 2;
9931 /* Save SPSR also; how? */
9932 printf_unfiltered (_("Process record does not support "
9933 "instruction 0x%0x at address %s.\n"),
9934 arm_insn_r->arm_insn,
9935 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
9938 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
9939 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
9940 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
9941 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
9944 if (0 == insn_op1 || 1 == insn_op1)
9946 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
9947 /* We don't do optimization for SMULW<y> where we
9949 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9950 record_buf[1] = ARM_PS_REGNUM;
9951 arm_insn_r->reg_rec_count = 2;
9953 else if (2 == insn_op1)
9956 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9957 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
9958 arm_insn_r->reg_rec_count = 2;
9960 else if (3 == insn_op1)
9963 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9964 arm_insn_r->reg_rec_count = 1;
9970 /* MSR : immediate form. */
9973 /* CPSR is going to be changed. */
9974 record_buf[0] = ARM_PS_REGNUM;
9975 arm_insn_r->reg_rec_count = 1;
9977 else if (3 == insn_op1)
9979 /* SPSR is going to be changed. */
9980 /* We need to get the SPSR value, which is yet to be done. */
9981 printf_unfiltered (_("Process record does not support "
9982 "instruction 0x%0x at address %s.\n"),
9983 arm_insn_r->arm_insn,
9984 paddress (arm_insn_r->gdbarch,
9985 arm_insn_r->this_addr));
9991 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9992 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
9993 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
9995 /* Handle load/store insn extension space. */
9997 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
9998 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
9999 && !INSN_RECORDED(arm_insn_r))
10004 /* These insns change registers and memory as well. */
10005 /* SWP or SWPB insn. */
10006 /* Get memory address given by Rn. */
10007 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10008 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10009 /* SWP insn: swaps a word. */
10010 if (8 == arm_insn_r->opcode)
10012 record_buf_mem[0] = 4;
10016 /* SWPB insn: swaps only a byte. */
10017 record_buf_mem[0] = 1;
10019 record_buf_mem[1] = u_regval;
10020 arm_insn_r->mem_rec_count = 1;
10021 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10022 arm_insn_r->reg_rec_count = 1;
10024 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10027 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10030 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10033 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10034 record_buf[1] = record_buf[0] + 1;
10035 arm_insn_r->reg_rec_count = 2;
10037 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10040 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10043 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10045 /* LDRH, LDRSB, LDRSH. */
10046 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10047 arm_insn_r->reg_rec_count = 1;
10052 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10053 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10054 && !INSN_RECORDED(arm_insn_r))
10057 /* Handle coprocessor insn extension space. */
10060 /* To be done for ARMv5 and later; as of now we return -1. */
10062 printf_unfiltered (_("Process record does not support instruction 0x%0x "
10063 "at address %s.\n"), arm_insn_r->arm_insn,
10064 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10067 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10068 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10073 /* Handling opcode 000 insns. */
10076 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10078 struct regcache *reg_cache = arm_insn_r->regcache;
10079 uint32_t record_buf[8], record_buf_mem[8];
10080 ULONGEST u_regval[2] = {0};
10082 uint32_t reg_src1 = 0, reg_dest = 0;
10083 uint32_t opcode1 = 0;
10085 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10086 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10087 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10089 /* Data processing insn /multiply insn. */
10090 if (9 == arm_insn_r->decode
10091 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10092 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10094 /* Handle multiply instructions. */
10095 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10096 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10098 /* Handle MLA and MUL. */
10099 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10100 record_buf[1] = ARM_PS_REGNUM;
10101 arm_insn_r->reg_rec_count = 2;
10103 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10105 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10106 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10107 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10108 record_buf[2] = ARM_PS_REGNUM;
10109 arm_insn_r->reg_rec_count = 3;
10112 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10113 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10115 /* Handle misc load insns, as 20th bit (L = 1). */
10116 /* An LDR insn can branch: when MOV LR, PC precedes an LDR
10117 whose destination register is R15, the pair emulates a
10118 branch-and-link insn, and hence we need to save CPSR and PC as
10119 well. I am not sure this is the right place; the opcode = 010
10120 LDR insns are what make this happen when R15 is the destination. */
10122 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10123 if (15 != reg_dest)
10125 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10126 arm_insn_r->reg_rec_count = 1;
10130 record_buf[0] = reg_dest;
10131 record_buf[1] = ARM_PS_REGNUM;
10132 arm_insn_r->reg_rec_count = 2;
10135 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10136 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10137 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10138 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10140 /* Handle MSR insn. */
10141 if (9 == arm_insn_r->opcode)
10143 /* CPSR is going to be changed. */
10144 record_buf[0] = ARM_PS_REGNUM;
10145 arm_insn_r->reg_rec_count = 1;
10149 /* SPSR is going to be changed. */
10150 /* How to read SPSR value? */
10151 printf_unfiltered (_("Process record does not support instruction "
10152 "0x%0x at address %s.\n"),
10153 arm_insn_r->arm_insn,
10154 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10158 else if (9 == arm_insn_r->decode
10159 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10160 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10162 /* Handling SWP, SWPB. */
10163 /* These insns change registers and memory as well. */
10164 /* SWP or SWPB insn. */
10166 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10167 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10168 /* SWP insn: swaps a word. */
10169 if (8 == arm_insn_r->opcode)
10171 record_buf_mem[0] = 4;
10175 /* SWPB insn: swaps only a byte. */
10176 record_buf_mem[0] = 1;
10178 record_buf_mem[1] = u_regval[0];
10179 arm_insn_r->mem_rec_count = 1;
10180 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10181 arm_insn_r->reg_rec_count = 1;
10183 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10184 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10186 /* Handle BLX, branch and link/exchange. */
10187 if (9 == arm_insn_r->opcode)
10189 /* The branch state is chosen by setting the T bit of CPSR from bit[0]
10190 of Rm, and R14 stores the return address. */
10191 record_buf[0] = ARM_PS_REGNUM;
10192 record_buf[1] = ARM_LR_REGNUM;
10193 arm_insn_r->reg_rec_count = 2;
10196 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10198 /* Handle enhanced software breakpoint insn, BKPT. */
10199 /* CPSR is changed to be executed in ARM state, disabling normal
10200 interrupts, entering abort mode. */
10201 /* According to high vector configuration PC is set. */
10202 /* If the user hits a breakpoint and types "reverse", we
10203 need to go back with the previous CPSR and
10204 Program Counter. */
10205 record_buf[0] = ARM_PS_REGNUM;
10206 record_buf[1] = ARM_LR_REGNUM;
10207 arm_insn_r->reg_rec_count = 2;
10209 /* Save SPSR also; how? */
10210 printf_unfiltered (_("Process record does not support instruction "
10211 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10212 paddress (arm_insn_r->gdbarch,
10213 arm_insn_r->this_addr));
10216 else if (11 == arm_insn_r->decode
10217 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10219 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10221 /* Handle str(x) insn */
10222 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10225 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10226 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10228 /* Handle BX, branch and exchange. */
10229 /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10230 record_buf[0] = ARM_PS_REGNUM;
10231 arm_insn_r->reg_rec_count = 1;
10233 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10234 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10235 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10237 /* Count leading zeros: CLZ. */
10238 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10239 arm_insn_r->reg_rec_count = 1;
10241 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10242 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10243 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10244 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10247 /* Handle MRS insn. */
10248 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10249 arm_insn_r->reg_rec_count = 1;
10251 else if (arm_insn_r->opcode <= 15)
10253 /* Normal data processing insns. */
10254 /* In all 11 shifter-operand modes, the insn modifies the destination
10255 register, which is specified by the 13-16 decode field. */
10256 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10257 record_buf[1] = ARM_PS_REGNUM;
10258 arm_insn_r->reg_rec_count = 2;
10265 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10266 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10270 /* Handling opcode 001 insns. */
10273 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10275 uint32_t record_buf[8], record_buf_mem[8];
10277 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10278 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10280 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10281 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10282 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10285 /* Handle MSR insn. */
10286 if (9 == arm_insn_r->opcode)
10288 /* CPSR is going to be changed. */
10289 record_buf[0] = ARM_PS_REGNUM;
10290 arm_insn_r->reg_rec_count = 1;
10294 /* SPSR is going to be changed. */
10297 else if (arm_insn_r->opcode <= 15)
10299 /* Normal data processing insns. */
10300 /* In all 11 shifter-operand modes, the insn modifies the destination
10301 register, which is specified by the 13-16 decode field. */
10302 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10303 record_buf[1] = ARM_PS_REGNUM;
10304 arm_insn_r->reg_rec_count = 2;
10311 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10312 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10316 /* Handle ARM mode instructions with opcode 010. */
10319 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10321 struct regcache *reg_cache = arm_insn_r->regcache;
10323 uint32_t reg_base , reg_dest;
10324 uint32_t offset_12, tgt_mem_addr;
10325 uint32_t record_buf[8], record_buf_mem[8];
10326 unsigned char wback;
10329 /* Calculate wback. */
10330 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10331 || (bit (arm_insn_r->arm_insn, 21) == 1);
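/* Example (illustrative): for a pre-indexed store with writeback such
   as "STR r1, [r2, #4]!" bit 24 (P) is 1 and bit 21 (W) is 1, so
   wback is true and the base register is recorded below in addition
   to the memory the store modifies; a plain "STR r1, [r2, #4]" has
   W == 0, so the base register is left alone.  */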
10333 arm_insn_r->reg_rec_count = 0;
10334 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10336 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10338 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10341 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10342 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10344 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10345 precedes an LDR instruction that has R15 as its destination, it
10346 emulates a branch and link instruction, and hence we need to save
10347 CPSR and PC as well. */
10348 if (ARM_PC_REGNUM == reg_dest)
10349 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10351 /* If wback is true, also save the base register, which is going to be
10354 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10358 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10360 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10361 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10363 /* Handle bit U. */
10364 if (bit (arm_insn_r->arm_insn, 23))
10366 /* U == 1: Add the offset. */
10367 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10371 /* U == 0: subtract the offset. */
10372 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10375 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10377 if (bit (arm_insn_r->arm_insn, 22))
10379 /* STRB and STRBT: 1 byte. */
10380 record_buf_mem[0] = 1;
10384 /* STR and STRT: 4 bytes. */
10385 record_buf_mem[0] = 4;
10388 /* Handle bit P. */
10389 if (bit (arm_insn_r->arm_insn, 24))
10390 record_buf_mem[1] = tgt_mem_addr;
10392 record_buf_mem[1] = (uint32_t) u_regval;
10394 arm_insn_r->mem_rec_count = 1;
10396 /* If wback is true, also save the base register, which is going to be
10399 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10402 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10403 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10407 /* Handling opcode 011 insns. */
10410 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10412 struct regcache *reg_cache = arm_insn_r->regcache;
10414 uint32_t shift_imm = 0;
10415 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10416 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10417 uint32_t record_buf[8], record_buf_mem[8];
10420 ULONGEST u_regval[2];
10422 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10423 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10425 /* Handle enhanced store insns and LDRD DSP insn,
10426 order begins according to addressing modes for store insns
10430 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10432 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10433 /* An LDR insn can branch: when MOV LR, PC precedes an LDR
10434 whose destination register is R15, the pair emulates a
10435 branch-and-link insn, and hence we
10436 need to save CPSR and PC as well. */
10437 if (15 != reg_dest)
10439 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10440 arm_insn_r->reg_rec_count = 1;
10444 record_buf[0] = reg_dest;
10445 record_buf[1] = ARM_PS_REGNUM;
10446 arm_insn_r->reg_rec_count = 2;
10451 if (! bits (arm_insn_r->arm_insn, 4, 11))
10453 /* Store insn, register offset and register pre-indexed,
10454 register post-indexed. */
10456 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10458 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10459 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10461 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10463 if (15 == reg_src2)
10465 /* If R15 is used as Rn, its value is the current PC + 8.  */
10466 /* Pre-indexed mode does not reach here; that would be an illegal insn.  */
10467 u_regval[0] = u_regval[0] + 8;
10469 /* Calculate target store address, Rn +/- Rm, register offset. */
10471 if (bit (arm_insn_r->arm_insn, 23))
10473 tgt_mem_addr = u_regval[0] + u_regval[1];
10477 tgt_mem_addr = u_regval[1] - u_regval[0];
10480 switch (arm_insn_r->opcode)
10494 record_buf_mem[0] = 4;
10509 record_buf_mem[0] = 1;
10513 gdb_assert_not_reached ("no decoding pattern found");
10516 record_buf_mem[1] = tgt_mem_addr;
10517 arm_insn_r->mem_rec_count = 1;
10519 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10520 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10521 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10522 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10523 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10524 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10527 /* Rn is going to be changed in pre-indexed mode and
10528 post-indexed mode as well. */
10529 record_buf[0] = reg_src2;
10530 arm_insn_r->reg_rec_count = 1;
10535 /* Store insn, scaled register offset; scaled pre-indexed. */
10536 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10538 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10540 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10541 /* Get shift_imm. */
10542 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10543 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10544 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10545 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10546 /* Offset_12 used as shift. */
10550 /* Offset_12 used as index. */
10551 offset_12 = u_regval[0] << shift_imm;
10555 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10561 if (bit (u_regval[0], 31))
10563 offset_12 = 0xFFFFFFFF;
10572 /* This is an arithmetic shift (ASR).  */
10573 offset_12 = s_word >> shift_imm;
10580 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10582 /* Get C flag value and shift it by 31. */
10583 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10584 | (u_regval[0]) >> 1);
10588 offset_12 = (u_regval[0] >> shift_imm)
10590 | (u_regval[0] << (32 - shift_imm));
10595 gdb_assert_not_reached ("no decoding pattern found");
10599 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10601 if (bit (arm_insn_r->arm_insn, 23))
10603 tgt_mem_addr = u_regval[1] + offset_12;
10607 tgt_mem_addr = u_regval[1] - offset_12;
10610 switch (arm_insn_r->opcode)
10624 record_buf_mem[0] = 4;
10639 record_buf_mem[0] = 1;
10643 gdb_assert_not_reached ("no decoding pattern found");
10646 record_buf_mem[1] = tgt_mem_addr;
10647 arm_insn_r->mem_rec_count = 1;
10649 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10650 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10651 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10652 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10653 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10654 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10657 /* Rn is going to be changed in scaled register pre-indexed
10658 mode and scaled post-indexed mode as well.  */
10659 record_buf[0] = reg_src2;
10660 arm_insn_r->reg_rec_count = 1;
10665 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10666 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10670 /* Handle ARM mode instructions with opcode 100. */
10673 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10675 struct regcache *reg_cache = arm_insn_r->regcache;
10676 uint32_t register_count = 0, register_bits;
10677 uint32_t reg_base, addr_mode;
10678 uint32_t record_buf[24], record_buf_mem[48];
10682 /* Fetch the list of registers. */
10683 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10684 arm_insn_r->reg_rec_count = 0;
10686 /* Fetch the base register that contains the address we are loading data from.  */
10688 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10690 /* Calculate wback. */
10691 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
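/* For load/store multiple, writeback of the base register is controlled
   solely by the W bit (bit 21).  */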
10693 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10695 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10697 /* Find out which registers are going to be loaded from memory. */
10698 while (register_bits)
10700 if (register_bits & 0x00000001)
10701 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10702 register_bits = register_bits >> 1;
10707 /* If wback is true, also save the base register, which is going to be written back.  */
10710 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10712 /* Save the CPSR register. */
10713 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10717 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10719 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
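/* addr_mode combines the U (bit 23) and P (bit 24) flags:
   0 = decrement after, 1 = increment after,
   2 = decrement before, 3 = increment before.  */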
10721 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10723 /* Find out how many registers are going to be stored to memory. */
10724 while (register_bits)
10726 if (register_bits & 0x00000001)
10728 register_bits = register_bits >> 1;
10733 /* STMDA (STMED): Decrement after. */
10735 record_buf_mem[1] = (uint32_t) u_regval
10736 - register_count * INT_REGISTER_SIZE + 4;
10738 /* STM (STMIA, STMEA): Increment after. */
10740 record_buf_mem[1] = (uint32_t) u_regval;
10742 /* STMDB (STMFD): Decrement before. */
10744 record_buf_mem[1] = (uint32_t) u_regval
10745 - register_count * INT_REGISTER_SIZE;
10747 /* STMIB (STMFA): Increment before. */
10749 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10752 gdb_assert_not_reached ("no decoding pattern found");
10756 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10757 arm_insn_r->mem_rec_count = 1;
10759 /* If wback is true, also save the base register, which is going to be written back.  */
10762 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10765 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10766 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10770 /* Handling opcode 101 insns. */
10773 arm_record_b_bl (insn_decode_record *arm_insn_r)
10775 uint32_t record_buf[8];
10777 /* Handle B, BL, BLX(1) insns. */
10778 /* B simply branches so we do nothing here. */
10779 /* Note: BLX(1) doesn't fall here; instead it falls into the
10780 extension space.  */
10781 if (bit (arm_insn_r->arm_insn, 24))
10783 record_buf[0] = ARM_LR_REGNUM;
10784 arm_insn_r->reg_rec_count = 1;
10787 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10793 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10795 printf_unfiltered (_("Process record does not support instruction "
10796 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10797 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10802 /* Record handler for vector data transfer instructions. */
10805 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10807 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10808 uint32_t record_buf[4];
10810 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10811 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10812 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10813 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10814 bit_l = bit (arm_insn_r->arm_insn, 20);
10815 bit_c = bit (arm_insn_r->arm_insn, 8);
10817 /* Handle VMOV instruction. */
10818 if (bit_l && bit_c)
10820 record_buf[0] = reg_t;
10821 arm_insn_r->reg_rec_count = 1;
10823 else if (bit_l && !bit_c)
10825 /* Handle VMOV instruction. */
10826 if (bits_a == 0x00)
10828 if (bit (arm_insn_r->arm_insn, 20))
10829 record_buf[0] = reg_t;
10831 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10834 arm_insn_r->reg_rec_count = 1;
10836 /* Handle VMRS instruction. */
10837 else if (bits_a == 0x07)
10840 reg_t = ARM_PS_REGNUM;
10842 record_buf[0] = reg_t;
10843 arm_insn_r->reg_rec_count = 1;
10846 else if (!bit_l && !bit_c)
10848 /* Handle VMOV instruction. */
10849 if (bits_a == 0x00)
10851 if (bit (arm_insn_r->arm_insn, 20))
10852 record_buf[0] = reg_t;
10854 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10857 arm_insn_r->reg_rec_count = 1;
10859 /* Handle VMSR instruction. */
10860 else if (bits_a == 0x07)
10862 record_buf[0] = ARM_FPSCR_REGNUM;
10863 arm_insn_r->reg_rec_count = 1;
10866 else if (!bit_l && bit_c)
10868 /* Handle VMOV instruction. */
10869 if (!(bits_a & 0x04))
10871 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
10873 arm_insn_r->reg_rec_count = 1;
10875 /* Handle VDUP instruction. */
10878 if (bit (arm_insn_r->arm_insn, 21))
10880 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10881 record_buf[0] = reg_v + ARM_D0_REGNUM;
10882 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
10883 arm_insn_r->reg_rec_count = 2;
10887 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10888 record_buf[0] = reg_v + ARM_D0_REGNUM;
10889 arm_insn_r->reg_rec_count = 1;
10894 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10898 /* Record handler for extension register load/store instructions. */
10901 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
10903 uint32_t opcode, single_reg;
10904 uint8_t op_vldm_vstm;
10905 uint32_t record_buf[8], record_buf_mem[128];
10906 ULONGEST u_regval = 0;
10908 struct regcache *reg_cache = arm_insn_r->regcache;
10909 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10911 opcode = bits (arm_insn_r->arm_insn, 20, 24);
10912 single_reg = bit (arm_insn_r->arm_insn, 8);
10913 op_vldm_vstm = opcode & 0x1b;
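/* Masking with 0x1b clears bit 2 of the opcode field, which is the
   instruction's D bit (bit 22), so the VSTM/VLDM checks below match
   regardless of that bit.  */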
10915 /* Handle VMOV instructions. */
10916 if ((opcode & 0x1e) == 0x04)
10918 if (bit (arm_insn_r->arm_insn, 4))
10920 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10921 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10922 arm_insn_r->reg_rec_count = 2;
10926 uint8_t reg_m = ((bits (arm_insn_r->arm_insn, 0, 3) << 1)
10927 | bit (arm_insn_r->arm_insn, 5));
10931 record_buf[0] = num_regs + reg_m;
10932 record_buf[1] = num_regs + reg_m + 1;
10933 arm_insn_r->reg_rec_count = 2;
10937 record_buf[0] = reg_m + ARM_D0_REGNUM;
10938 arm_insn_r->reg_rec_count = 1;
10942 /* Handle VSTM and VPUSH instructions. */
10943 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
10944 || op_vldm_vstm == 0x12)
10946 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
10947 uint32_t memory_index = 0;
10949 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
10950 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
10951 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
10952 imm_off32 = imm_off8 << 2;
10953 memory_count = imm_off8;
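/* The 8-bit immediate counts 32-bit words, so the byte offset covered by
   the transfer is imm8 * 4.  */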
10955 if (bit (arm_insn_r->arm_insn, 23))
10956 start_address = u_regval;
10958 start_address = u_regval - imm_off32;
10960 if (bit (arm_insn_r->arm_insn, 21))
10962 record_buf[0] = reg_rn;
10963 arm_insn_r->reg_rec_count = 1;
10966 while (memory_count > 0)
10970 record_buf_mem[memory_index] = start_address;
10971 record_buf_mem[memory_index + 1] = 4;
10972 start_address = start_address + 4;
10973 memory_index = memory_index + 2;
10977 record_buf_mem[memory_index] = start_address;
10978 record_buf_mem[memory_index + 1] = 4;
10979 record_buf_mem[memory_index + 2] = start_address + 4;
10980 record_buf_mem[memory_index + 3] = 4;
10981 start_address = start_address + 8;
10982 memory_index = memory_index + 4;
10986 arm_insn_r->mem_rec_count = (memory_index >> 1);
10988 /* Handle VLDM instructions. */
10989 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
10990 || op_vldm_vstm == 0x13)
10992 uint32_t reg_count, reg_vd;
10993 uint32_t reg_index = 0;
10995 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
10996 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
10999 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11001 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11003 if (bit (arm_insn_r->arm_insn, 21))
11004 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11006 while (reg_count > 0)
11009 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
11011 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11015 arm_insn_r->reg_rec_count = reg_index;
11017 /* VSTR Vector store register. */
11018 else if ((opcode & 0x13) == 0x10)
11020 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11021 uint32_t memory_index = 0;
11023 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11024 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11025 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11026 imm_off32 = imm_off8 << 2;
11028 if (bit (arm_insn_r->arm_insn, 23))
11029 start_address = u_regval + imm_off32;
11031 start_address = u_regval - imm_off32;
11035 record_buf_mem[memory_index] = start_address;
11036 record_buf_mem[memory_index + 1] = 4;
11037 arm_insn_r->mem_rec_count = 1;
11041 record_buf_mem[memory_index] = start_address;
11042 record_buf_mem[memory_index + 1] = 4;
11043 record_buf_mem[memory_index + 2] = start_address + 4;
11044 record_buf_mem[memory_index + 3] = 4;
11045 arm_insn_r->mem_rec_count = 2;
11048 /* VLDR Vector load register. */
11049 else if ((opcode & 0x13) == 0x11)
11051 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11055 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11056 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11060 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11061 record_buf[0] = num_regs + reg_vd;
11063 arm_insn_r->reg_rec_count = 1;
11066 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11067 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11071 /* Record handler for arm/thumb mode VFP data processing instructions. */
11074 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11076 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11077 uint32_t record_buf[4];
11078 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11079 enum insn_types curr_insn_type = INSN_INV;
11081 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11082 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11083 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11084 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11085 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11086 bit_d = bit (arm_insn_r->arm_insn, 22);
11087 opc1 = opc1 & 0x04;
11089 /* Handle VMLA, VMLS. */
11092 if (bit (arm_insn_r->arm_insn, 10))
11094 if (bit (arm_insn_r->arm_insn, 6))
11095 curr_insn_type = INSN_T0;
11097 curr_insn_type = INSN_T1;
11102 curr_insn_type = INSN_T1;
11104 curr_insn_type = INSN_T2;
11107 /* Handle VNMLA, VNMLS, VNMUL. */
11108 else if (opc1 == 0x01)
11111 curr_insn_type = INSN_T1;
11113 curr_insn_type = INSN_T2;
11116 else if (opc1 == 0x02 && !(opc3 & 0x01))
11118 if (bit (arm_insn_r->arm_insn, 10))
11120 if (bit (arm_insn_r->arm_insn, 6))
11121 curr_insn_type = INSN_T0;
11123 curr_insn_type = INSN_T1;
11128 curr_insn_type = INSN_T1;
11130 curr_insn_type = INSN_T2;
11133 /* Handle VADD, VSUB. */
11134 else if (opc1 == 0x03)
11136 if (!bit (arm_insn_r->arm_insn, 9))
11138 if (bit (arm_insn_r->arm_insn, 6))
11139 curr_insn_type = INSN_T0;
11141 curr_insn_type = INSN_T1;
11146 curr_insn_type = INSN_T1;
11148 curr_insn_type = INSN_T2;
/* Handle VDIV.  */
11152 else if (opc1 == 0x08)
11155 curr_insn_type = INSN_T1;
11157 curr_insn_type = INSN_T2;
11159 /* Handle all other vfp data processing instructions. */
11160 else if (opc1 == 0x0b)
11163 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11165 if (bit (arm_insn_r->arm_insn, 4))
11167 if (bit (arm_insn_r->arm_insn, 6))
11168 curr_insn_type = INSN_T0;
11170 curr_insn_type = INSN_T1;
11175 curr_insn_type = INSN_T1;
11177 curr_insn_type = INSN_T2;
11180 /* Handle VNEG and VABS. */
11181 else if ((opc2 == 0x01 && opc3 == 0x01)
11182 || (opc2 == 0x00 && opc3 == 0x03))
11184 if (!bit (arm_insn_r->arm_insn, 11))
11186 if (bit (arm_insn_r->arm_insn, 6))
11187 curr_insn_type = INSN_T0;
11189 curr_insn_type = INSN_T1;
11194 curr_insn_type = INSN_T1;
11196 curr_insn_type = INSN_T2;
11199 /* Handle VSQRT. */
11200 else if (opc2 == 0x01 && opc3 == 0x03)
11203 curr_insn_type = INSN_T1;
11205 curr_insn_type = INSN_T2;
11208 else if (opc2 == 0x07 && opc3 == 0x03)
11211 curr_insn_type = INSN_T1;
11213 curr_insn_type = INSN_T2;
11215 else if (opc3 & 0x01)
11218 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11220 if (!bit (arm_insn_r->arm_insn, 18))
11221 curr_insn_type = INSN_T2;
11225 curr_insn_type = INSN_T1;
11227 curr_insn_type = INSN_T2;
11231 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11234 curr_insn_type = INSN_T1;
11236 curr_insn_type = INSN_T2;
11238 /* Handle VCVTB, VCVTT. */
11239 else if ((opc2 & 0x0e) == 0x02)
11240 curr_insn_type = INSN_T2;
11241 /* Handle VCMP, VCMPE. */
11242 else if ((opc2 & 0x0e) == 0x04)
11243 curr_insn_type = INSN_T3;
11247 switch (curr_insn_type)
11250 reg_vd = reg_vd | (bit_d << 4);
11251 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11252 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11253 arm_insn_r->reg_rec_count = 2;
11257 reg_vd = reg_vd | (bit_d << 4);
11258 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11259 arm_insn_r->reg_rec_count = 1;
11263 reg_vd = (reg_vd << 1) | bit_d;
11264 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11265 arm_insn_r->reg_rec_count = 1;
11269 record_buf[0] = ARM_FPSCR_REGNUM;
11270 arm_insn_r->reg_rec_count = 1;
11274 gdb_assert_not_reached ("no decoding pattern found");
11278 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11282 /* Handling opcode 110 insns. */
11285 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11287 uint32_t op1, op1_ebit, coproc;
11289 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11290 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11291 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11293 if ((coproc & 0x0e) == 0x0a)
11295 /* Handle extension register ld/st instructions. */
11297 return arm_record_exreg_ld_st_insn (arm_insn_r);
11299 /* 64-bit transfers between arm core and extension registers. */
11300 if ((op1 & 0x3e) == 0x04)
11301 return arm_record_exreg_ld_st_insn (arm_insn_r);
11305 /* Handle coprocessor ld/st instructions. */
11310 return arm_record_unsupported_insn (arm_insn_r);
11313 return arm_record_unsupported_insn (arm_insn_r);
11316 /* Move to coprocessor from two arm core registers. */
11318 return arm_record_unsupported_insn (arm_insn_r);
11320 /* Move to two arm core registers from coprocessor. */
11325 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11326 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11327 arm_insn_r->reg_rec_count = 2;
11329 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11333 return arm_record_unsupported_insn (arm_insn_r);
11336 /* Handling opcode 111 insns. */
11339 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11341 uint32_t op, op1_sbit, op1_ebit, coproc;
11342 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11343 struct regcache *reg_cache = arm_insn_r->regcache;
11345 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11346 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11347 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11348 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11349 op = bit (arm_insn_r->arm_insn, 4);
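/* Coprocessor numbers 0b101x select the VFP/Advanced SIMD space; op
   (bit 4) and op1_ebit (bit 20) then distinguish data processing from
   register transfer instructions below.  */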
11351 /* Handle arm SWI/SVC system call instructions. */
11354 if (tdep->arm_syscall_record != NULL)
11356 ULONGEST svc_operand, svc_number;
11358 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11360 if (svc_operand) /* OABI. */
11361 svc_number = svc_operand - 0x900000;
11363 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11365 return tdep->arm_syscall_record (reg_cache, svc_number);
11369 printf_unfiltered (_("no syscall record support\n"));
11374 if ((coproc & 0x0e) == 0x0a)
11376 /* VFP data-processing instructions. */
11377 if (!op1_sbit && !op)
11378 return arm_record_vfp_data_proc_insn (arm_insn_r);
11380 /* Advanced SIMD, VFP instructions. */
11381 if (!op1_sbit && op)
11382 return arm_record_vdata_transfer_insn (arm_insn_r);
11386 /* Coprocessor data operations. */
11387 if (!op1_sbit && !op)
11388 return arm_record_unsupported_insn (arm_insn_r);
11390 /* Move to Coprocessor from ARM core register. */
11391 if (!op1_sbit && !op1_ebit && op)
11392 return arm_record_unsupported_insn (arm_insn_r);
11394 /* Move to arm core register from coprocessor. */
11395 if (!op1_sbit && op1_ebit && op)
11397 uint32_t record_buf[1];
11399 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11400 if (record_buf[0] == 15)
11401 record_buf[0] = ARM_PS_REGNUM;
11403 arm_insn_r->reg_rec_count = 1;
11404 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11410 return arm_record_unsupported_insn (arm_insn_r);
11413 /* Handling opcode 000 insns. */
11416 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11418 uint32_t record_buf[8];
11419 uint32_t reg_src1 = 0;
11421 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11423 record_buf[0] = ARM_PS_REGNUM;
11424 record_buf[1] = reg_src1;
11425 thumb_insn_r->reg_rec_count = 2;
11427 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11433 /* Handling opcode 001 insns. */
11436 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11438 uint32_t record_buf[8];
11439 uint32_t reg_src1 = 0;
11441 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11443 record_buf[0] = ARM_PS_REGNUM;
11444 record_buf[1] = reg_src1;
11445 thumb_insn_r->reg_rec_count = 2;
11447 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11452 /* Handling opcode 010 insns. */
11455 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11457 struct regcache *reg_cache = thumb_insn_r->regcache;
11458 uint32_t record_buf[8], record_buf_mem[8];
11460 uint32_t reg_src1 = 0, reg_src2 = 0;
11461 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11463 ULONGEST u_regval[2] = {0};
11465 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11467 if (bit (thumb_insn_r->arm_insn, 12))
11469 /* Handle load/store register offset. */
11470 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11471 if (opcode2 >= 12 && opcode2 <= 15)
11473 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11474 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11475 record_buf[0] = reg_src1;
11476 thumb_insn_r->reg_rec_count = 1;
11478 else if (opcode2 >= 8 && opcode2 <= 10)
11480 /* STR(2), STRB(2), STRH(2).  */
11481 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11482 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11483 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11484 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11486 record_buf_mem[0] = 4; /* STR (2). */
11487 else if (10 == opcode2)
11488 record_buf_mem[0] = 1; /* STRB (2). */
11489 else if (9 == opcode2)
11490 record_buf_mem[0] = 2; /* STRH (2). */
11491 record_buf_mem[1] = u_regval[0] + u_regval[1];
11492 thumb_insn_r->mem_rec_count = 1;
11495 else if (bit (thumb_insn_r->arm_insn, 11))
11497 /* Handle load from literal pool. */
11499 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11500 record_buf[0] = reg_src1;
11501 thumb_insn_r->reg_rec_count = 1;
11505 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11506 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11507 if ((3 == opcode2) && (!opcode3))
11509 /* Branch with exchange. */
11510 record_buf[0] = ARM_PS_REGNUM;
11511 thumb_insn_r->reg_rec_count = 1;
11515 /* Format 8; special data processing insns. */
11516 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11517 record_buf[0] = ARM_PS_REGNUM;
11518 record_buf[1] = reg_src1;
11519 thumb_insn_r->reg_rec_count = 2;
11524 /* Format 5; data processing insns. */
11525 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11526 if (bit (thumb_insn_r->arm_insn, 7))
11528 reg_src1 = reg_src1 + 8;
11530 record_buf[0] = ARM_PS_REGNUM;
11531 record_buf[1] = reg_src1;
11532 thumb_insn_r->reg_rec_count = 2;
11535 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11536 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11542 /* Handling opcode 011 insns. */
11545 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11547 struct regcache *reg_cache = thumb_insn_r->regcache;
11548 uint32_t record_buf[8], record_buf_mem[8];
11550 uint32_t reg_src1 = 0;
11551 uint32_t opcode = 0, immed_5 = 0;
11553 ULONGEST u_regval = 0;
11555 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11560 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11561 record_buf[0] = reg_src1;
11562 thumb_insn_r->reg_rec_count = 1;
11567 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11568 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11569 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11570 record_buf_mem[0] = 4;
11571 record_buf_mem[1] = u_regval + (immed_5 * 4);
11572 thumb_insn_r->mem_rec_count = 1;
11575 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11576 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11582 /* Handling opcode 100 insns. */
11585 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11587 struct regcache *reg_cache = thumb_insn_r->regcache;
11588 uint32_t record_buf[8], record_buf_mem[8];
11590 uint32_t reg_src1 = 0;
11591 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11593 ULONGEST u_regval = 0;
11595 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11600 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11601 record_buf[0] = reg_src1;
11602 thumb_insn_r->reg_rec_count = 1;
11604 else if (1 == opcode)
11607 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11608 record_buf[0] = reg_src1;
11609 thumb_insn_r->reg_rec_count = 1;
11611 else if (2 == opcode)
11614 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11615 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11616 record_buf_mem[0] = 4;
11617 record_buf_mem[1] = u_regval + (immed_8 * 4);
11618 thumb_insn_r->mem_rec_count = 1;
11620 else if (0 == opcode)
11623 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11624 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11625 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11626 record_buf_mem[0] = 2;
11627 record_buf_mem[1] = u_regval + (immed_5 * 2);
11628 thumb_insn_r->mem_rec_count = 1;
11631 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11632 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11638 /* Handling opcode 101 insns. */
11641 thumb_record_misc (insn_decode_record *thumb_insn_r)
11643 struct regcache *reg_cache = thumb_insn_r->regcache;
11645 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11646 uint32_t register_bits = 0, register_count = 0;
11647 uint32_t index = 0, start_address = 0;
11648 uint32_t record_buf[24], record_buf_mem[48];
11651 ULONGEST u_regval = 0;
11653 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11654 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11655 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11660 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11661 while (register_bits)
11663 if (register_bits & 0x00000001)
11664 record_buf[index++] = register_count;
11665 register_bits = register_bits >> 1;
11668 record_buf[index++] = ARM_PS_REGNUM;
11669 record_buf[index++] = ARM_SP_REGNUM;
11670 thumb_insn_r->reg_rec_count = index;
11672 else if (10 == opcode2)
11675 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11676 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11677 while (register_bits)
11679 if (register_bits & 0x00000001)
11681 register_bits = register_bits >> 1;
11683 start_address = u_regval - \
11684 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11685 thumb_insn_r->mem_rec_count = register_count;
11686 while (register_count)
11688 record_buf_mem[(register_count * 2) - 1] = start_address;
11689 record_buf_mem[(register_count * 2) - 2] = 4;
11690 start_address = start_address + 4;
11693 record_buf[0] = ARM_SP_REGNUM;
11694 thumb_insn_r->reg_rec_count = 1;
11696 else if (0x1E == opcode1)
11699 /* Handle the enhanced software breakpoint insn, BKPT.  */
11700 /* The CPSR is changed so that execution continues in ARM state with
11701 normal interrupts disabled, and abort mode is entered.  */
11702 /* The PC is set according to the high vector configuration.  */
11703 /* When the user hits the breakpoint and then reverses execution, we need
11704 to go back with the previous CPSR and program counter.  */
11705 record_buf[0] = ARM_PS_REGNUM;
11706 record_buf[1] = ARM_LR_REGNUM;
11707 thumb_insn_r->reg_rec_count = 2;
11708 /* We need to save SPSR value, which is not yet done. */
11709 printf_unfiltered (_("Process record does not support instruction "
11710 "0x%0x at address %s.\n"),
11711 thumb_insn_r->arm_insn,
11712 paddress (thumb_insn_r->gdbarch,
11713 thumb_insn_r->this_addr));
11716 else if ((0 == opcode) || (1 == opcode))
11718 /* ADD(5), ADD(6). */
11719 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11720 record_buf[0] = reg_src1;
11721 thumb_insn_r->reg_rec_count = 1;
11723 else if (2 == opcode)
11725 /* ADD(7), SUB(4). */
11726 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11727 record_buf[0] = ARM_SP_REGNUM;
11728 thumb_insn_r->reg_rec_count = 1;
11731 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11732 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11738 /* Handling opcode 110 insns. */
11741 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11743 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11744 struct regcache *reg_cache = thumb_insn_r->regcache;
11746 uint32_t ret = 0;  /* Function return value: -1 = record failure, 0 = success.  */
11747 uint32_t reg_src1 = 0;
11748 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11749 uint32_t index = 0, start_address = 0;
11750 uint32_t record_buf[24], record_buf_mem[48];
11752 ULONGEST u_regval = 0;
11754 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11755 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11761 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11763 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11764 while (register_bits)
11766 if (register_bits & 0x00000001)
11767 record_buf[index++] = register_count;
11768 register_bits = register_bits >> 1;
11771 record_buf[index++] = reg_src1;
11772 thumb_insn_r->reg_rec_count = index;
11774 else if (0 == opcode2)
11776 /* Handle STMIA.  */
11777 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11779 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11780 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11781 while (register_bits)
11783 if (register_bits & 0x00000001)
11785 register_bits = register_bits >> 1;
11787 start_address = u_regval;
11788 thumb_insn_r->mem_rec_count = register_count;
11789 while (register_count)
11791 record_buf_mem[(register_count * 2) - 1] = start_address;
11792 record_buf_mem[(register_count * 2) - 2] = 4;
11793 start_address = start_address + 4;
11797 else if (0x1F == opcode1)
11799 /* Handle arm syscall insn. */
11800 if (tdep->arm_syscall_record != NULL)
11802 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11803 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11807 printf_unfiltered (_("no syscall record support\n"));
11812 /* B(1), the conditional branch, is automatically taken care of in
11813 process_record, as the PC is saved there.  */
11815 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11816 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11822 /* Handling opcode 111 insns. */
11825 thumb_record_branch (insn_decode_record *thumb_insn_r)
11827 uint32_t record_buf[8];
11828 uint32_t bits_h = 0;
11830 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
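/* bits 11-12 of the Thumb branch group: 0 = B(2) unconditional branch,
   1 = BLX suffix (switches to ARM state, so CPSR changes), 2 = BL/BLX
   prefix (writes LR), 3 = BL suffix (writes LR).  */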
11832 if (2 == bits_h || 3 == bits_h)
11835 record_buf[0] = ARM_LR_REGNUM;
11836 thumb_insn_r->reg_rec_count = 1;
11838 else if (1 == bits_h)
11841 record_buf[0] = ARM_PS_REGNUM;
11842 record_buf[1] = ARM_LR_REGNUM;
11843 thumb_insn_r->reg_rec_count = 2;
11846 /* B(2) is automatically taken care of in process_record, as the PC is saved there.  */
11849 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11854 /* Handler for thumb2 load/store multiple instructions. */
11857 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
11859 struct regcache *reg_cache = thumb2_insn_r->regcache;
11861 uint32_t reg_rn, op;
11862 uint32_t register_bits = 0, register_count = 0;
11863 uint32_t index = 0, start_address = 0;
11864 uint32_t record_buf[24], record_buf_mem[48];
11866 ULONGEST u_regval = 0;
11868 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11869 op = bits (thumb2_insn_r->arm_insn, 23, 24);
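/* op (bits 23-24): values 0 and 3 select the SRS/RFE encodings, while
   1 and 2 select the STM/LDM increment-after and decrement-before
   forms handled below.  */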
11871 if (0 == op || 3 == op)
11873 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11875 /* Handle RFE instruction. */
11876 record_buf[0] = ARM_PS_REGNUM;
11877 thumb2_insn_r->reg_rec_count = 1;
11881 /* Handle SRS instruction after reading banked SP. */
11882 return arm_record_unsupported_insn (thumb2_insn_r);
11885 else if (1 == op || 2 == op)
11887 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11889 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
11890 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11891 while (register_bits)
11893 if (register_bits & 0x00000001)
11894 record_buf[index++] = register_count;
11897 register_bits = register_bits >> 1;
11899 record_buf[index++] = reg_rn;
11900 record_buf[index++] = ARM_PS_REGNUM;
11901 thumb2_insn_r->reg_rec_count = index;
11905 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
11906 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11907 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11908 while (register_bits)
11910 if (register_bits & 0x00000001)
11913 register_bits = register_bits >> 1;
11918 /* Start address calculation for STM/STMIA/STMEA.  */
11919 start_address = u_regval;
11923 /* Start address calculation for STMDB/STMFD.  */
11924 start_address = u_regval - register_count * 4;
11927 thumb2_insn_r->mem_rec_count = register_count;
11928 while (register_count)
11930 record_buf_mem[register_count * 2 - 1] = start_address;
11931 record_buf_mem[register_count * 2 - 2] = 4;
11932 start_address = start_address + 4;
11935 record_buf[0] = reg_rn;
11936 record_buf[1] = ARM_PS_REGNUM;
11937 thumb2_insn_r->reg_rec_count = 2;
11941 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
11943 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
11945 return ARM_RECORD_SUCCESS;
11948 /* Handler for thumb2 load/store (dual/exclusive) and table branch
11952 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
11954 struct regcache *reg_cache = thumb2_insn_r->regcache;
11956 uint32_t reg_rd, reg_rn, offset_imm;
11957 uint32_t reg_dest1, reg_dest2;
11958 uint32_t address, offset_addr;
11959 uint32_t record_buf[8], record_buf_mem[8];
11960 uint32_t op1, op2, op3;
11962 ULONGEST u_regval[2];
11964 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
11965 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
11966 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
11968 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11970 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
11972 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
11973 record_buf[0] = reg_dest1;
11974 record_buf[1] = ARM_PS_REGNUM;
11975 thumb2_insn_r->reg_rec_count = 2;
11978 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
11980 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
11981 record_buf[2] = reg_dest2;
11982 thumb2_insn_r->reg_rec_count = 3;
11987 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11988 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
11990 if (0 == op1 && 0 == op2)
11992 /* Handle STREX. */
11993 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
11994 address = u_regval[0] + (offset_imm * 4);
11995 record_buf_mem[0] = 4;
11996 record_buf_mem[1] = address;
11997 thumb2_insn_r->mem_rec_count = 1;
11998 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
11999 record_buf[0] = reg_rd;
12000 thumb2_insn_r->reg_rec_count = 1;
12002 else if (1 == op1 && 0 == op2)
12004 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12005 record_buf[0] = reg_rd;
12006 thumb2_insn_r->reg_rec_count = 1;
12007 address = u_regval[0];
12008 record_buf_mem[1] = address;
12012 /* Handle STREXB. */
12013 record_buf_mem[0] = 1;
12014 thumb2_insn_r->mem_rec_count = 1;
12018 /* Handle STREXH. */
12019 record_buf_mem[0] = 2;
12020 thumb2_insn_r->mem_rec_count = 1;
12024 /* Handle STREXD. */
12025 address = u_regval[0];
12026 record_buf_mem[0] = 4;
12027 record_buf_mem[2] = 4;
12028 record_buf_mem[3] = address + 4;
12029 thumb2_insn_r->mem_rec_count = 2;
12034 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12036 if (bit (thumb2_insn_r->arm_insn, 24))
12038 if (bit (thumb2_insn_r->arm_insn, 23))
12039 offset_addr = u_regval[0] + (offset_imm * 4);
12041 offset_addr = u_regval[0] - (offset_imm * 4);
12043 address = offset_addr;
12046 address = u_regval[0];
12048 record_buf_mem[0] = 4;
12049 record_buf_mem[1] = address;
12050 record_buf_mem[2] = 4;
12051 record_buf_mem[3] = address + 4;
12052 thumb2_insn_r->mem_rec_count = 2;
12053 record_buf[0] = reg_rn;
12054 thumb2_insn_r->reg_rec_count = 1;
12058 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12060 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12062 return ARM_RECORD_SUCCESS;
12065 /* Handler for thumb2 data processing (shift register and modified immediate)
12069 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12071 uint32_t reg_rd, op;
12072 uint32_t record_buf[8];
12074 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12075 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12077 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12079 record_buf[0] = ARM_PS_REGNUM;
12080 thumb2_insn_r->reg_rec_count = 1;
12084 record_buf[0] = reg_rd;
12085 record_buf[1] = ARM_PS_REGNUM;
12086 thumb2_insn_r->reg_rec_count = 2;
12089 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12091 return ARM_RECORD_SUCCESS;
12094 /* Generic handler for thumb2 instructions which effect destination and PS
12098 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12101 uint32_t record_buf[8];
12103 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12105 record_buf[0] = reg_rd;
12106 record_buf[1] = ARM_PS_REGNUM;
12107 thumb2_insn_r->reg_rec_count = 2;
12109 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12111 return ARM_RECORD_SUCCESS;
12114 /* Handler for thumb2 branch and miscellaneous control instructions. */
12117 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12119 uint32_t op, op1, op2;
12120 uint32_t record_buf[8];
12122 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12123 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12124 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12126 /* Handle MSR insn. */
12127 if (!(op1 & 0x2) && 0x38 == op)
12131 /* CPSR is going to be changed. */
12132 record_buf[0] = ARM_PS_REGNUM;
12133 thumb2_insn_r->reg_rec_count = 1;
12137 arm_record_unsupported_insn (thumb2_insn_r);
12141 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12144 record_buf[0] = ARM_PS_REGNUM;
12145 record_buf[1] = ARM_LR_REGNUM;
12146 thumb2_insn_r->reg_rec_count = 2;
12149 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12151 return ARM_RECORD_SUCCESS;
12154 /* Handler for thumb2 store single data item instructions. */
12157 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12159 struct regcache *reg_cache = thumb2_insn_r->regcache;
12161 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12162 uint32_t address, offset_addr;
12163 uint32_t record_buf[8], record_buf_mem[8];
12166 ULONGEST u_regval[2];
12168 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12169 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
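/* op1 (bits 21-23) selects the store size (byte, halfword or word, with
   or without the 12-bit immediate form); op2 (bits 6-11) distinguishes
   the register, immediate and unprivileged addressing forms.  */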
12170 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12171 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12173 if (bit (thumb2_insn_r->arm_insn, 23))
12176 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12177 offset_addr = u_regval[0] + offset_imm;
12178 address = offset_addr;
12183 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12185 /* Handle STRB (register). */
12186 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12187 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12188 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12189 offset_addr = u_regval[1] << shift_imm;
12190 address = u_regval[0] + offset_addr;
12194 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12195 if (bit (thumb2_insn_r->arm_insn, 10))
12197 if (bit (thumb2_insn_r->arm_insn, 9))
12198 offset_addr = u_regval[0] + offset_imm;
12200 offset_addr = u_regval[0] - offset_imm;
12202 address = offset_addr;
12205 address = u_regval[0];
12211 /* Store byte instructions. */
12214 record_buf_mem[0] = 1;
12216 /* Store half word instructions. */
12219 record_buf_mem[0] = 2;
12221 /* Store word instructions. */
12224 record_buf_mem[0] = 4;
12228 gdb_assert_not_reached ("no decoding pattern found");
12232 record_buf_mem[1] = address;
12233 thumb2_insn_r->mem_rec_count = 1;
12234 record_buf[0] = reg_rn;
12235 thumb2_insn_r->reg_rec_count = 1;
12237 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12239 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12241 return ARM_RECORD_SUCCESS;
12244 /* Handler for thumb2 load memory hints instructions. */
12247 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12249 uint32_t record_buf[8];
12250 uint32_t reg_rt, reg_rn;
12252 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12253 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12255 if (ARM_PC_REGNUM != reg_rt)
12257 record_buf[0] = reg_rt;
12258 record_buf[1] = reg_rn;
12259 record_buf[2] = ARM_PS_REGNUM;
12260 thumb2_insn_r->reg_rec_count = 3;
12262 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12264 return ARM_RECORD_SUCCESS;
12267 return ARM_RECORD_FAILURE;
12270 /* Handler for thumb2 load word instructions. */
12273 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12275 uint32_t record_buf[8];
12277 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12278 record_buf[1] = ARM_PS_REGNUM;
12279 thumb2_insn_r->reg_rec_count = 2;
12281 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12283 return ARM_RECORD_SUCCESS;
12286 /* Handler for thumb2 long multiply, long multiply accumulate, and
12287 divide instructions. */
12290 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12292 uint32_t opcode1 = 0, opcode2 = 0;
12293 uint32_t record_buf[8];
12295 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12296 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12298 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12300 /* Handle SMULL, UMULL, SMULAL. */
12301 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12302 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12303 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12304 record_buf[2] = ARM_PS_REGNUM;
12305 thumb2_insn_r->reg_rec_count = 3;
12307 else if (1 == opcode1 || 3 == opcode1)
12309 /* Handle SDIV and UDIV. */
12310 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12311 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12312 record_buf[2] = ARM_PS_REGNUM;
12313 thumb2_insn_r->reg_rec_count = 3;
12316 return ARM_RECORD_FAILURE;
12318 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12320 return ARM_RECORD_SUCCESS;
12323 /* Record handler for thumb32 coprocessor instructions. */
12326 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12328 if (bit (thumb2_insn_r->arm_insn, 25))
12329 return arm_record_coproc_data_proc (thumb2_insn_r);
12331 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12334 /* Record handler for advanced SIMD structure load/store instructions. */
12337 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12339 struct regcache *reg_cache = thumb2_insn_r->regcache;
12340 uint32_t l_bit, a_bit, b_bits;
12341 uint32_t record_buf[128], record_buf_mem[128];
12342 uint32_t reg_rn, reg_vd, address, f_elem;
12343 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12346 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12347 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12348 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12349 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12350 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12351 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12352 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12353 f_elem = 8 / f_ebytes;
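/* f_ebytes is the element size in bytes (1, 2, 4 or 8) taken from the
   size field, and f_elem is the number of such elements held by one
   64-bit D register.  */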
12357 ULONGEST u_regval = 0;
12358 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12359 address = u_regval;
12364 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12366 if (b_bits == 0x07)
12368 else if (b_bits == 0x0a)
12370 else if (b_bits == 0x06)
12372 else if (b_bits == 0x02)
12377 for (index_r = 0; index_r < bf_regs; index_r++)
12379 for (index_e = 0; index_e < f_elem; index_e++)
12381 record_buf_mem[index_m++] = f_ebytes;
12382 record_buf_mem[index_m++] = address;
12383 address = address + f_ebytes;
12384 thumb2_insn_r->mem_rec_count += 1;
12389 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12391 if (b_bits == 0x09 || b_bits == 0x08)
12393 else if (b_bits == 0x03)
12398 for (index_r = 0; index_r < bf_regs; index_r++)
12399 for (index_e = 0; index_e < f_elem; index_e++)
12401 for (loop_t = 0; loop_t < 2; loop_t++)
12403 record_buf_mem[index_m++] = f_ebytes;
12404 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12405 thumb2_insn_r->mem_rec_count += 1;
12407 address = address + (2 * f_ebytes);
12411 else if ((b_bits & 0x0e) == 0x04)
12413 for (index_e = 0; index_e < f_elem; index_e++)
12415 for (loop_t = 0; loop_t < 3; loop_t++)
12417 record_buf_mem[index_m++] = f_ebytes;
12418 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12419 thumb2_insn_r->mem_rec_count += 1;
12421 address = address + (3 * f_ebytes);
12425 else if (!(b_bits & 0x0e))
12427 for (index_e = 0; index_e < f_elem; index_e++)
12429 for (loop_t = 0; loop_t < 4; loop_t++)
12431 record_buf_mem[index_m++] = f_ebytes;
12432 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12433 thumb2_insn_r->mem_rec_count += 1;
12435 address = address + (4 * f_ebytes);
12441 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12443 if (bft_size == 0x00)
12445 else if (bft_size == 0x01)
12447 else if (bft_size == 0x02)
12453 if (!(b_bits & 0x0b) || b_bits == 0x08)
12454 thumb2_insn_r->mem_rec_count = 1;
12456 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12457 thumb2_insn_r->mem_rec_count = 2;
12459 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12460 thumb2_insn_r->mem_rec_count = 3;
12462 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12463 thumb2_insn_r->mem_rec_count = 4;
12465 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12467 record_buf_mem[index_m * 2] = f_ebytes;
12468 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12477 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12478 thumb2_insn_r->reg_rec_count = 1;
12480 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12481 thumb2_insn_r->reg_rec_count = 2;
12483 else if ((b_bits & 0x0e) == 0x04)
12484 thumb2_insn_r->reg_rec_count = 3;
12486 else if (!(b_bits & 0x0e))
12487 thumb2_insn_r->reg_rec_count = 4;
12492 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12493 thumb2_insn_r->reg_rec_count = 1;
12495 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12496 thumb2_insn_r->reg_rec_count = 2;
12498 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12499 thumb2_insn_r->reg_rec_count = 3;
12501 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12502 thumb2_insn_r->reg_rec_count = 4;
12504 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12505 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12509 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12511 record_buf[index_r] = reg_rn;
12512 thumb2_insn_r->reg_rec_count += 1;
12515 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12517 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12522 /* Decodes thumb2 instruction type and invokes its record handler. */
12524 static unsigned int
12525 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12527 uint32_t op, op1, op2;
12529 op = bit (thumb2_insn_r->arm_insn, 15);
12530 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12531 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
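/* op1 (bits 27-28) selects the major group of 32-bit Thumb encodings;
   op (bit 15) and op2 (bits 20-26) refine the choice of record handler
   below.  */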
12535 if (!(op2 & 0x64 ))
12537 /* Load/store multiple instruction. */
12538 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12540 else if (!((op2 & 0x64) ^ 0x04))
12542 /* Load/store (dual/exclusive) and table branch instruction. */
12543 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12545 else if (!((op2 & 0x20) ^ 0x20))
12547 /* Data-processing (shifted register). */
12548 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12550 else if (op2 & 0x40)
12552 /* Co-processor instructions. */
12553 return thumb2_record_coproc_insn (thumb2_insn_r);
12556 else if (op1 == 0x02)
12560 /* Branches and miscellaneous control instructions. */
12561 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12563 else if (op2 & 0x20)
12565 /* Data-processing (plain binary immediate) instruction. */
12566 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12570 /* Data-processing (modified immediate). */
12571 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12574 else if (op1 == 0x03)
12576 if (!(op2 & 0x71 ))
12578 /* Store single data item. */
12579 return thumb2_record_str_single_data (thumb2_insn_r);
12581 else if (!((op2 & 0x71) ^ 0x10))
12583 /* Advanced SIMD or structure load/store instructions. */
12584 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12586 else if (!((op2 & 0x67) ^ 0x01))
12588 /* Load byte, memory hints instruction. */
12589 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12591 else if (!((op2 & 0x67) ^ 0x03))
12593 /* Load halfword, memory hints instruction. */
12594 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12596 else if (!((op2 & 0x67) ^ 0x05))
12598 /* Load word instruction. */
12599 return thumb2_record_ld_word (thumb2_insn_r);
12601 else if (!((op2 & 0x70) ^ 0x20))
12603 /* Data-processing (register) instruction. */
12604 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12606 else if (!((op2 & 0x78) ^ 0x30))
12608 /* Multiply, multiply accumulate, abs diff instruction. */
12609 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12611 else if (!((op2 & 0x78) ^ 0x38))
12613 /* Long multiply, long multiply accumulate, and divide. */
12614 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12616 else if (op2 & 0x40)
12618 /* Co-processor instructions. */
12619 return thumb2_record_coproc_insn (thumb2_insn_r);
12626 /* Extracts an arm/thumb/thumb2 insn depending on the size, and returns 0 on
12627 success and a positive value on failure.  */
12630 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12632 gdb_byte buf[insn_size];
12634 memset (&buf[0], 0, insn_size);
12636 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12638 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12640 gdbarch_byte_order_for_code (insn_record->gdbarch));
12644 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12646 /* Decode an arm/thumb insn depending on condition codes and opcodes, and dispatch it to the matching record handler.  */
12650 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12651 uint32_t insn_size)
12654 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm instruction.  */
12656 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12658 arm_record_data_proc_misc_ld_str, /* 000. */
12659 arm_record_data_proc_imm, /* 001. */
12660 arm_record_ld_st_imm_offset, /* 010. */
12661 arm_record_ld_st_reg_offset, /* 011. */
12662 arm_record_ld_st_multiple, /* 100. */
12663 arm_record_b_bl, /* 101. */
12664 arm_record_asimd_vfp_coproc, /* 110. */
12665 arm_record_coproc_data_proc /* 111. */
12668 /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb instruction.  */
12670 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12672 thumb_record_shift_add_sub, /* 000. */
12673 thumb_record_add_sub_cmp_mov, /* 001. */
12674 thumb_record_ld_st_reg_offset, /* 010. */
12675 thumb_record_ld_st_imm_offset, /* 011. */
12676 thumb_record_ld_st_stack, /* 100. */
12677 thumb_record_misc, /* 101. */
12678 thumb_record_ldm_stm_swi, /* 110. */
12679 thumb_record_branch /* 111. */
12682 uint32_t ret = 0;  /* Return value: negative = failure, 0 = success.  */
12683 uint32_t insn_id = 0;
12685 if (extract_arm_insn (arm_record, insn_size))
12689 printf_unfiltered (_("Process record: error reading memory at "
12690 "addr %s len = %d.\n"),
12691 paddress (arm_record->gdbarch,
12692 arm_record->this_addr), insn_size);
12696 else if (ARM_RECORD == record_type)
12698 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12699 insn_id = bits (arm_record->arm_insn, 25, 27);
12700 ret = arm_record_extension_space (arm_record);
12701 /* If this insn has fallen into extension space
12702 then we need not decode it anymore. */
12703 if (ret != -1 && !INSN_RECORDED (arm_record))
12705 ret = arm_handle_insn[insn_id] (arm_record);
12708 else if (THUMB_RECORD == record_type)
12710 /* As thumb does not have condition codes, we set cond to a negative value.  */
12711 arm_record->cond = -1;
12712 insn_id = bits (arm_record->arm_insn, 13, 15);
12713 ret = thumb_handle_insn[insn_id] (arm_record);
12715 else if (THUMB2_RECORD == record_type)
12717 /* As thumb does not have condition codes, we set cond to a negative value.  */
12718 arm_record->cond = -1;
12720 /* Swap the first half of the 32-bit thumb instruction with the second half.  */
12721 arm_record->arm_insn
12722 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
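/* With the usual little-endian code ordering the 32-bit read leaves the
   first halfword in the low 16 bits, while the Thumb-2 decode helpers
   expect it in the high 16 bits, hence the swap.  */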
12724 insn_id = thumb2_record_decode_insn_handler (arm_record);
12726 if (insn_id != ARM_RECORD_SUCCESS)
12728 arm_record_unsupported_insn (arm_record);
12734 /* Throw assertion. */
12735 gdb_assert_not_reached ("not a valid instruction, could not decode");
12742 /* Cleans up local record registers and memory allocations. */
12745 deallocate_reg_mem (insn_decode_record *record)
12747 xfree (record->arm_regs);
12748 xfree (record->arm_mems);
12752 /* Parse the current instruction and record the values of the registers and
12753 memory that will be changed by the current instruction to "record_arch_list".
12754 Return -1 if something is wrong.  */
12757 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12758 CORE_ADDR insn_addr)
12761 uint32_t no_of_rec = 0;
12762 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
12763 ULONGEST t_bit = 0, insn_id = 0;
12765 ULONGEST u_regval = 0;
12767 insn_decode_record arm_record;
12769 memset (&arm_record, 0, sizeof (insn_decode_record));
12770 arm_record.regcache = regcache;
12771 arm_record.this_addr = insn_addr;
12772 arm_record.gdbarch = gdbarch;
12775 if (record_debug > 1)
12777 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12779 paddress (gdbarch, arm_record.this_addr));
12782 if (extract_arm_insn (&arm_record, 2))
12786 printf_unfiltered (_("Process record: error reading memory at "
12787 "addr %s len = %d.\n"),
12788 paddress (arm_record.gdbarch,
12789 arm_record.this_addr), 2);
12794 /* Check whether the insn is a thumb or an arm one.  */
12796 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12797 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
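/* The T bit of the CPSR indicates whether the processor is currently
   executing in Thumb state; it decides which decoder is used below.  */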
12800 if (!(u_regval & t_bit))
12802 /* We are decoding arm insn. */
12803 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12807 insn_id = bits (arm_record.arm_insn, 11, 15);
12808 /* Is it a thumb2 insn?  */
12809 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12811 ret = decode_insn (&arm_record, THUMB2_RECORD,
12812 THUMB2_INSN_SIZE_BYTES);
12816 /* We are decoding thumb insn. */
12817 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12823 /* Record registers. */
12824 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12825 if (arm_record.arm_regs)
12827 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12829 if (record_full_arch_list_add_reg
12830 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
12834 /* Record memories. */
12835 if (arm_record.arm_mems)
12837 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12839 if (record_full_arch_list_add_mem
12840 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12841 arm_record.arm_mems[no_of_rec].len))
12846 if (record_full_arch_list_add_end ())
12851 deallocate_reg_mem (&arm_record);