1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "arch/arm-get-next-pcs.h"
51 #include "gdb/sim-arm.h"
54 #include "coff/internal.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as a Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
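/* Illustrative sketch (added for exposition, not part of the original
   flow): how a symbol reader might mark a Thumb function and how the
   flag is queried later.  The helper name below is hypothetical; in
   this file the bit is really set via the *_make_msymbol_special
   gdbarch hooks and tested by arm_pc_is_thumb.  */
#if 0
static void
example_mark_thumb_function (struct minimal_symbol *msym, int is_thumb)
{
  if (is_thumb)
    MSYMBOL_SET_SPECIAL (msym);		/* Remember "this is Thumb code".  */

  if (MSYMBOL_IS_SPECIAL (msym))
    {
      /* Addresses inside this function should be treated as Thumb.  */
    }
}
#endif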
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly style. */
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb
248 struct arm_prologue_cache
250 /* The stack pointer at the time this frame was created; i.e. the
251 caller's stack pointer when this function was called. It is used
252 to identify this frame. */
255 /* The frame base for this frame is just prev_sp - frame size.
256 FRAMESIZE is the distance from the frame pointer to the
257 initial stack pointer. */
261 /* The register used to hold the frame pointer for this frame. */
264 /* Saved register offsets. */
265 struct trad_frame_saved_reg *saved_regs;
268 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
269 CORE_ADDR prologue_start,
270 CORE_ADDR prologue_end,
271 struct arm_prologue_cache *cache);
273 /* Architecture version for displaced stepping. This affects the behaviour of
274 certain instructions, and really should not be hard-wired. */
276 #define DISPLACED_STEPPING_ARCH_VERSION 5
278 /* Set to true if the 32-bit mode is in use. */
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
285 arm_psr_thumb_bit (struct gdbarch *gdbarch)
287 if (gdbarch_tdep (gdbarch)->is_m)
293 /* Determine if the processor is currently executing in Thumb mode. */
296 arm_is_thumb (struct regcache *regcache)
299 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
301 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
303 return (cpsr & t_bit) != 0;
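/* Worked example (illustrative): on A/R profiles the CPSR T bit is
   bit 5 (mask 0x20); on M profiles the XPSR T bit is bit 24.  So a
   CPSR value of 0x60000030 indicates Thumb state, while 0x60000010
   indicates ARM state.  A minimal sketch of the check:  */
#if 0
static void
example_t_bit_check (void)
{
  ULONGEST cpsr_thumb = 0x60000030;	/* User mode, T bit (0x20) set.  */
  ULONGEST cpsr_arm = 0x60000010;	/* User mode, T bit clear.  */

  gdb_assert ((cpsr_thumb & 0x20) != 0);	/* Thumb state.  */
  gdb_assert ((cpsr_arm & 0x20) == 0);		/* ARM state.  */
}
#endif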
306 /* Determine if FRAME is executing in Thumb mode. */
309 arm_frame_is_thumb (struct frame_info *frame)
312 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
314 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
315 directly (from a signal frame or dummy frame) or by interpreting
316 the saved LR (from a prologue or DWARF frame). So consult it and
317 trust the unwinders. */
318 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
320 return (cpsr & t_bit) != 0;
323 /* Callback for VEC_lower_bound. */
326 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
327 const struct arm_mapping_symbol *rhs)
329 return lhs->value < rhs->value;
332 /* Search for the mapping symbol covering MEMADDR. If one is found,
333 return its type. Otherwise, return 0. If START is non-NULL,
334 set *START to the location of the mapping symbol. */
337 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
339 struct obj_section *sec;
341 /* If there are mapping symbols, consult them. */
342 sec = find_pc_section (memaddr);
345 struct arm_per_objfile *data;
346 VEC(arm_mapping_symbol_s) *map;
347 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
351 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
352 arm_objfile_data_key);
355 map = data->section_maps[sec->the_bfd_section->index];
356 if (!VEC_empty (arm_mapping_symbol_s, map))
358 struct arm_mapping_symbol *map_sym;
360 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
361 arm_compare_mapping_symbols);
363 /* VEC_lower_bound finds the earliest ordered insertion
364 point. If the following symbol starts at this exact
365 address, we use that; otherwise, the preceding
366 mapping symbol covers this address. */
367 if (idx < VEC_length (arm_mapping_symbol_s, map))
369 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
370 if (map_sym->value == map_key.value)
373 *start = map_sym->value + obj_section_addr (sec);
374 return map_sym->type;
380 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
382 *start = map_sym->value + obj_section_addr (sec);
383 return map_sym->type;
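/* Worked example (illustrative): suppose a section loaded at 0x8000
   carries the mapping symbols $a at offset 0x100 and $t at offset
   0x140.  For MEMADDR == 0x8150 the lower-bound search lands just
   past the $t entry, so the preceding symbol covers the address and
   the function returns 't' with *START set to 0x8140; an address
   below 0x8100 is not covered by any mapping symbol and yields 0.  */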
392 /* Determine if the program counter specified in MEMADDR is in a Thumb
393 function. This function should be called for addresses unrelated to
394 any executing frame; otherwise, prefer arm_frame_is_thumb. */
397 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
399 struct bound_minimal_symbol sym;
401 struct displaced_step_closure* dsc
402 = get_displaced_step_closure_by_addr(memaddr);
404 /* If checking the mode of a displaced instruction in the copy area, the mode
405 should be determined by the instruction at the original address.
409 fprintf_unfiltered (gdb_stdlog,
410 "displaced: check mode of %.8lx instead of %.8lx\n",
411 (unsigned long) dsc->insn_addr,
412 (unsigned long) memaddr);
413 memaddr = dsc->insn_addr;
416 /* If bit 0 of the address is set, assume this is a Thumb address. */
417 if (IS_THUMB_ADDR (memaddr))
420 /* Respect internal mode override if active. */
421 if (arm_override_mode != -1)
422 return arm_override_mode;
424 /* If the user wants to override the symbol table, let them. */
425 if (strcmp (arm_force_mode_string, "arm") == 0)
427 if (strcmp (arm_force_mode_string, "thumb") == 0)
430 /* ARM v6-M and v7-M are always in Thumb mode. */
431 if (gdbarch_tdep (gdbarch)->is_m)
434 /* If there are mapping symbols, consult them. */
435 type = arm_find_mapping_symbol (memaddr, NULL);
439 /* Thumb functions have a "special" bit set in minimal symbols. */
440 sym = lookup_minimal_symbol_by_pc (memaddr);
442 return (MSYMBOL_IS_SPECIAL (sym.minsym));
444 /* If the user wants to override the fallback mode, let them. */
445 if (strcmp (arm_fallback_mode_string, "arm") == 0)
447 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
450 /* If we couldn't find any symbol, but we're talking to a running
451 target, then trust the current value of $cpsr. This lets
452 "display/i $pc" always show the correct mode (though if there is
453 a symbol table we will not reach here, so it still may not be
454 displayed in the mode it will be executed). */
455 if (target_has_registers)
456 return arm_frame_is_thumb (get_current_frame ());
458 /* Otherwise we're out of luck; we assume ARM. */
462 /* Remove useless bits from addresses in a running program. */
464 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
466 /* On M-profile devices, do not strip the low bit from EXC_RETURN
467 (the magic exception return address). */
468 if (gdbarch_tdep (gdbarch)->is_m
469 && (val & 0xfffffff0) == 0xfffffff0)
473 return UNMAKE_THUMB_ADDR (val);
475 return (val & 0x03fffffc);
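/* Worked examples (illustrative): with 32-bit addressing in effect, a
   Thumb return address of 0x00008001 is cleaned to 0x00008000 by
   UNMAKE_THUMB_ADDR; on an M-profile target the magic EXC_RETURN
   value 0xfffffff1 is returned unchanged; the 0x03fffffc mask is only
   reached in the legacy 26-bit APCS configuration.  */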
478 /* Return 1 if PC is the start of a compiler helper function which
479 can be safely ignored during prologue skipping. IS_THUMB is true
480 if the function is known to be a Thumb function due to the way it is being called. */
483 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
485 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
486 struct bound_minimal_symbol msym;
488 msym = lookup_minimal_symbol_by_pc (pc);
489 if (msym.minsym != NULL
490 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
491 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
493 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
495 /* The GNU linker's Thumb call stub to foo is named __foo_from_thumb. */
497 if (strstr (name, "_from_thumb") != NULL)
500 /* On soft-float targets, __truncdfsf2 is called to convert promoted
501 arguments to their argument types in non-prototyped functions. */
503 if (startswith (name, "__truncdfsf2"))
505 if (startswith (name, "__aeabi_d2f"))
508 /* Internal functions related to thread-local storage. */
509 if (startswith (name, "__tls_get_addr"))
511 if (startswith (name, "__aeabi_read_tp"))
516 /* If we run against a stripped glibc, we may be unable to identify
517 special functions by name. Check for one important case,
518 __aeabi_read_tp, by comparing the *code* against the default
519 implementation (this is hand-written ARM assembler in glibc). */
522 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
523 == 0xe3e00a0f /* mov r0, #0xffff0fff */
524 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
525 == 0xe240f01f) /* sub pc, r0, #31 */
532 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
533 the first 16 bits of the instruction, and INSN2 is the second 16 bits of the instruction. */
535 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
536 ((bits ((insn1), 0, 3) << 12) \
537 | (bits ((insn1), 10, 10) << 11) \
538 | (bits ((insn2), 12, 14) << 8) \
539 | bits ((insn2), 0, 7))
541 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
542 the 32-bit instruction. */
543 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
544 ((bits ((insn), 16, 19) << 12) \
545 | bits ((insn), 0, 11))
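/* Worked example (illustrative): the 32-bit Thumb encoding of
   "movw r0, #0x1234" is the halfword pair 0xf241/0x2034, and the ARM
   encoding of "movw r2, #0x1345" is 0xe3012345; the macros above
   reassemble the scattered immediate fields as shown below.  */
#if 0
static void
example_extract_movw_imm (void)
{
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) == 0x1234);
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_A (0xe3012345) == 0x1345);
}
#endif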
547 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
550 thumb_expand_immediate (unsigned int imm)
552 unsigned int count = imm >> 7;
560 return (imm & 0xff) | ((imm & 0xff) << 16);
562 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
564 return (imm & 0xff) | ((imm & 0xff) << 8)
565 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
568 return (0x80 | (imm & 0x7f)) << (32 - count);
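/* Worked examples (illustrative): 0x1ff has imm<11:10> == 00 and
   imm<9:8> == 01, so the byte 0xff is replicated into 0x00ff00ff;
   0x4ff has count == 9, so 0x80 | 0x7f is rotated right by 9 bits,
   giving 0x7f800000.  */
#if 0
static void
example_thumb_expand_immediate (void)
{
  gdb_assert (thumb_expand_immediate (0x1ff) == 0x00ff00ff);
  gdb_assert (thumb_expand_immediate (0x4ff) == 0x7f800000);
}
#endif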
571 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
572 epilogue, 0 otherwise. */
575 thumb_instruction_restores_sp (unsigned short insn)
577 return (insn == 0x46bd /* mov sp, r7 */
578 || (insn & 0xff80) == 0xb000 /* add sp, imm */
579 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
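/* Worked examples (illustrative): 0xb008 ("add sp, #32") matches the
   0xb000 pattern, 0xbc10 ("pop {r4}") matches the 0xbc00 pattern, and
   0xb082 ("sub sp, #8") is correctly rejected.  */
#if 0
static void
example_thumb_restores_sp (void)
{
  gdb_assert (thumb_instruction_restores_sp (0xb008));	 /* add sp, #32 */
  gdb_assert (thumb_instruction_restores_sp (0xbc10));	 /* pop {r4} */
  gdb_assert (!thumb_instruction_restores_sp (0xb082));	 /* sub sp, #8 */
}
#endif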
582 /* Analyze a Thumb prologue, looking for a recognizable stack frame
583 and frame pointer. Scan until we encounter a store that could
584 clobber the stack frame unexpectedly, or an unknown instruction.
585 Return the last address which is definitely safe to skip for an
586 initial breakpoint. */
589 thumb_analyze_prologue (struct gdbarch *gdbarch,
590 CORE_ADDR start, CORE_ADDR limit,
591 struct arm_prologue_cache *cache)
593 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
594 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
597 struct pv_area *stack;
598 struct cleanup *back_to;
600 CORE_ADDR unrecognized_pc = 0;
602 for (i = 0; i < 16; i++)
603 regs[i] = pv_register (i, 0);
604 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
605 back_to = make_cleanup_free_pv_area (stack);
607 while (start < limit)
611 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
613 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
618 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
621 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
622 whether to save LR (R14). */
623 mask = (insn & 0xff) | ((insn & 0x100) << 6);
625 /* Calculate offsets of saved R0-R7 and LR. */
626 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
627 if (mask & (1 << regno))
629 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
631 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
634 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
636 offset = (insn & 0x7f) << 2; /* get scaled offset */
637 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
640 else if (thumb_instruction_restores_sp (insn))
642 /* Don't scan past the epilogue. */
645 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
646 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
648 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
649 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
650 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
652 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
653 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
654 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
656 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
657 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
658 && pv_is_constant (regs[bits (insn, 3, 5)]))
659 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
660 regs[bits (insn, 6, 8)]);
661 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
662 && pv_is_constant (regs[bits (insn, 3, 6)]))
664 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
665 int rm = bits (insn, 3, 6);
666 regs[rd] = pv_add (regs[rd], regs[rm]);
668 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
670 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
671 int src_reg = (insn & 0x78) >> 3;
672 regs[dst_reg] = regs[src_reg];
674 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
676 /* Handle stores to the stack. Normally pushes are used,
677 but with GCC -mtpcs-frame, there may be other stores
678 in the prologue to create the frame. */
679 int regno = (insn >> 8) & 0x7;
682 offset = (insn & 0xff) << 2;
683 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
685 if (pv_area_store_would_trash (stack, addr))
688 pv_area_store (stack, addr, 4, regs[regno]);
690 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
692 int rd = bits (insn, 0, 2);
693 int rn = bits (insn, 3, 5);
696 offset = bits (insn, 6, 10) << 2;
697 addr = pv_add_constant (regs[rn], offset);
699 if (pv_area_store_would_trash (stack, addr))
702 pv_area_store (stack, addr, 4, regs[rd]);
704 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
705 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
706 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
707 /* Ignore stores of argument registers to the stack. */
709 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
710 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
711 /* Ignore block loads from the stack, potentially copying
712 parameters from memory. */
714 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
715 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
716 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
717 /* Similarly ignore single loads from the stack. */
719 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
720 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
721 /* Skip register copies, i.e. saves to another register
722 instead of the stack. */
724 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
725 /* Recognize constant loads; even with small stacks these are necessary on Thumb. */
727 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
728 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
730 /* Constant pool loads, for the same reason. */
731 unsigned int constant;
734 loc = start + 4 + bits (insn, 0, 7) * 4;
735 constant = read_memory_unsigned_integer (loc, 4, byte_order);
736 regs[bits (insn, 8, 10)] = pv_constant (constant);
738 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
740 unsigned short inst2;
742 inst2 = read_memory_unsigned_integer (start + 2, 2,
743 byte_order_for_code);
745 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
747 /* BL, BLX. Allow some special function calls when
748 skipping the prologue; GCC generates these before
749 storing arguments to the stack. */
751 int j1, j2, imm1, imm2;
753 imm1 = sbits (insn, 0, 10);
754 imm2 = bits (inst2, 0, 10);
755 j1 = bit (inst2, 13);
756 j2 = bit (inst2, 11);
758 offset = ((imm1 << 12) + (imm2 << 1));
759 offset ^= ((!j2) << 22) | ((!j1) << 23);
761 nextpc = start + 4 + offset;
762 /* For BLX make sure to clear the low bits. */
763 if (bit (inst2, 12) == 0)
764 nextpc = nextpc & 0xfffffffc;
766 if (!skip_prologue_function (gdbarch, nextpc,
767 bit (inst2, 12) != 0))
771 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
773 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
775 pv_t addr = regs[bits (insn, 0, 3)];
778 if (pv_area_store_would_trash (stack, addr))
781 /* Calculate offsets of saved registers. */
782 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
783 if (inst2 & (1 << regno))
785 addr = pv_add_constant (addr, -4);
786 pv_area_store (stack, addr, 4, regs[regno]);
790 regs[bits (insn, 0, 3)] = addr;
793 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
795 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
797 int regno1 = bits (inst2, 12, 15);
798 int regno2 = bits (inst2, 8, 11);
799 pv_t addr = regs[bits (insn, 0, 3)];
801 offset = inst2 & 0xff;
803 addr = pv_add_constant (addr, offset);
805 addr = pv_add_constant (addr, -offset);
807 if (pv_area_store_would_trash (stack, addr))
810 pv_area_store (stack, addr, 4, regs[regno1]);
811 pv_area_store (stack, pv_add_constant (addr, 4),
815 regs[bits (insn, 0, 3)] = addr;
818 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
819 && (inst2 & 0x0c00) == 0x0c00
820 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
822 int regno = bits (inst2, 12, 15);
823 pv_t addr = regs[bits (insn, 0, 3)];
825 offset = inst2 & 0xff;
827 addr = pv_add_constant (addr, offset);
829 addr = pv_add_constant (addr, -offset);
831 if (pv_area_store_would_trash (stack, addr))
834 pv_area_store (stack, addr, 4, regs[regno]);
837 regs[bits (insn, 0, 3)] = addr;
840 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
841 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
843 int regno = bits (inst2, 12, 15);
846 offset = inst2 & 0xfff;
847 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
849 if (pv_area_store_would_trash (stack, addr))
852 pv_area_store (stack, addr, 4, regs[regno]);
855 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
856 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
857 /* Ignore stores of argument registers to the stack. */
860 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
861 && (inst2 & 0x0d00) == 0x0c00
862 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
863 /* Ignore stores of argument registers to the stack. */
866 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
868 && (inst2 & 0x8000) == 0x0000
869 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
870 /* Ignore block loads from the stack, potentially copying
871 parameters from memory. */
874 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
876 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
877 /* Similarly ignore dual loads from the stack. */
880 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
881 && (inst2 & 0x0d00) == 0x0c00
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 /* Similarly ignore single loads from the stack. */
886 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
887 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
888 /* Similarly ignore single loads from the stack. */
891 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
892 && (inst2 & 0x8000) == 0x0000)
894 unsigned int imm = ((bits (insn, 10, 10) << 11)
895 | (bits (inst2, 12, 14) << 8)
896 | bits (inst2, 0, 7));
898 regs[bits (inst2, 8, 11)]
899 = pv_add_constant (regs[bits (insn, 0, 3)],
900 thumb_expand_immediate (imm));
903 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
904 && (inst2 & 0x8000) == 0x0000)
906 unsigned int imm = ((bits (insn, 10, 10) << 11)
907 | (bits (inst2, 12, 14) << 8)
908 | bits (inst2, 0, 7));
910 regs[bits (inst2, 8, 11)]
911 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
914 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
915 && (inst2 & 0x8000) == 0x0000)
917 unsigned int imm = ((bits (insn, 10, 10) << 11)
918 | (bits (inst2, 12, 14) << 8)
919 | bits (inst2, 0, 7));
921 regs[bits (inst2, 8, 11)]
922 = pv_add_constant (regs[bits (insn, 0, 3)],
923 - (CORE_ADDR) thumb_expand_immediate (imm));
926 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
927 && (inst2 & 0x8000) == 0x0000)
929 unsigned int imm = ((bits (insn, 10, 10) << 11)
930 | (bits (inst2, 12, 14) << 8)
931 | bits (inst2, 0, 7));
933 regs[bits (inst2, 8, 11)]
934 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
937 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
939 unsigned int imm = ((bits (insn, 10, 10) << 11)
940 | (bits (inst2, 12, 14) << 8)
941 | bits (inst2, 0, 7));
943 regs[bits (inst2, 8, 11)]
944 = pv_constant (thumb_expand_immediate (imm));
947 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
950 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
952 regs[bits (inst2, 8, 11)] = pv_constant (imm);
955 else if (insn == 0xea5f /* mov.w Rd,Rm */
956 && (inst2 & 0xf0f0) == 0)
958 int dst_reg = (inst2 & 0x0f00) >> 8;
959 int src_reg = inst2 & 0xf;
960 regs[dst_reg] = regs[src_reg];
963 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
965 /* Constant pool loads. */
966 unsigned int constant;
969 offset = bits (inst2, 0, 11);
971 loc = start + 4 + offset;
973 loc = start + 4 - offset;
975 constant = read_memory_unsigned_integer (loc, 4, byte_order);
976 regs[bits (inst2, 12, 15)] = pv_constant (constant);
979 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
981 /* Constant pool loads. */
982 unsigned int constant;
985 offset = bits (inst2, 0, 7) << 2;
987 loc = start + 4 + offset;
989 loc = start + 4 - offset;
991 constant = read_memory_unsigned_integer (loc, 4, byte_order);
992 regs[bits (inst2, 12, 15)] = pv_constant (constant);
994 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
995 regs[bits (inst2, 8, 11)] = pv_constant (constant);
998 else if (thumb2_instruction_changes_pc (insn, inst2))
1000 /* Don't scan past anything that might change control flow. */
1005 /* The optimizer might shove anything into the prologue,
1006 so we just skip what we don't recognize. */
1007 unrecognized_pc = start;
1012 else if (thumb_instruction_changes_pc (insn))
1014 /* Don't scan past anything that might change control flow. */
1019 /* The optimizer might shove anything into the prologue,
1020 so we just skip what we don't recognize. */
1021 unrecognized_pc = start;
1028 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1029 paddress (gdbarch, start));
1031 if (unrecognized_pc == 0)
1032 unrecognized_pc = start;
1036 do_cleanups (back_to);
1037 return unrecognized_pc;
1040 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1042 /* Frame pointer is fp. Frame size is constant. */
1043 cache->framereg = ARM_FP_REGNUM;
1044 cache->framesize = -regs[ARM_FP_REGNUM].k;
1046 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1048 /* Frame pointer is r7. Frame size is constant. */
1049 cache->framereg = THUMB_FP_REGNUM;
1050 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1054 /* Try the stack pointer... this is a bit desperate. */
1055 cache->framereg = ARM_SP_REGNUM;
1056 cache->framesize = -regs[ARM_SP_REGNUM].k;
1059 for (i = 0; i < 16; i++)
1060 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1061 cache->saved_regs[i].addr = offset;
1063 do_cleanups (back_to);
1064 return unrecognized_pc;
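/* Worked example (illustrative): for a typical -mthumb prologue

	0xb590		push	{r4, r7, lr}
	0xb082		sub	sp, #8
	0xaf00		add	r7, sp, #0

   the scan above leaves r7 equal to the entry SP minus 20, so a
   supplied CACHE ends up with framereg == THUMB_FP_REGNUM and
   framesize == 20, and records r4, r7 and lr as saved at offsets
   -12, -8 and -4 from the entry SP (rebased to absolute addresses
   later by arm_make_prologue_cache).  */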
1068 /* Try to analyze the instructions starting from PC, which load the symbol
1069 __stack_chk_guard. Return the address of the instruction after loading this
1070 symbol, set the destination register number in *BASEREG, and set the size of
1071 the instructions for loading the symbol in OFFSET. Return 0 if the instructions are not recognized. */
1075 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1076 unsigned int *destreg, int *offset)
1078 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1079 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1080 unsigned int low, high, address;
1085 unsigned short insn1
1086 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1088 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1090 *destreg = bits (insn1, 8, 10);
1092 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1093 address = read_memory_unsigned_integer (address, 4,
1094 byte_order_for_code);
1096 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1098 unsigned short insn2
1099 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1101 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1104 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1106 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1108 /* movt Rd, #const */
1109 if ((insn1 & 0xfbc0) == 0xf2c0)
1111 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1112 *destreg = bits (insn2, 8, 11);
1114 address = (high << 16 | low);
1121 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1123 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1125 address = bits (insn, 0, 11) + pc + 8;
1126 address = read_memory_unsigned_integer (address, 4,
1127 byte_order_for_code);
1129 *destreg = bits (insn, 12, 15);
1132 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1134 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1137 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1139 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1141 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1142 *destreg = bits (insn, 12, 15);
1144 address = (high << 16 | low);
1152 /* Try to skip the sequence of instructions used for the stack protector. If PC
1153 points to the first instruction of this sequence, return the address of the
1154 first instruction after this sequence; otherwise, return the original PC.
1156 On ARM, this sequence of instructions consists of three main steps:
1157 Step 1: load the symbol __stack_chk_guard,
1158 Step 2: load from the address of __stack_chk_guard,
1159 Step 3: store it somewhere else.
1161 Usually, the instructions in steps 2 and 3 are the same across ARM
1162 architectures: step 2 is a single 'ldr Rx, [Rn, #0]', and
1163 step 3 is a single 'str Rx, [r7, #immd]'. However, the
1164 instructions in step 1 vary between ARM architectures. On ARMv7, they are:
1167 movw Rn, #:lower16:__stack_chk_guard
1168 movt Rn, #:upper16:__stack_chk_guard
1175 .word __stack_chk_guard
1177 Since ldr/str are very common instructions, we can't use them alone as the
1178 'fingerprint' or 'signature' of a stack protector sequence. Instead we use the
1179 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1180 stripped, as the 'fingerprint' of a stack protector code sequence. */
1183 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1185 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1186 unsigned int basereg;
1187 struct bound_minimal_symbol stack_chk_guard;
1189 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1192 /* Try to parse the instructions in Step 1. */
1193 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1198 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1199 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1200 Otherwise, this sequence cannot be for the stack protector. */
1201 if (stack_chk_guard.minsym == NULL
1202 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1207 unsigned int destreg;
1209 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1211 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1212 if ((insn & 0xf800) != 0x6800)
1214 if (bits (insn, 3, 5) != basereg)
1216 destreg = bits (insn, 0, 2);
1218 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1219 byte_order_for_code);
1220 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1221 if ((insn & 0xf800) != 0x6000)
1223 if (destreg != bits (insn, 0, 2))
1228 unsigned int destreg;
1230 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1232 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1233 if ((insn & 0x0e500000) != 0x04100000)
1235 if (bits (insn, 16, 19) != basereg)
1237 destreg = bits (insn, 12, 15);
1238 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1239 insn = read_memory_unsigned_integer (pc + offset + 4,
1240 4, byte_order_for_code);
1241 if ((insn & 0x0e500000) != 0x04000000)
1243 if (bits (insn, 12, 15) != destreg)
1246 /* The total size of the two ldr/str instructions is 4 on Thumb-2, while 8 on ARM. */
1249 return pc + offset + 4;
1251 return pc + offset + 8;
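/* Illustrative example of the code being skipped (ARMv7, Thumb,
   -fstack-protector; the register numbers and the store offset are
   only an example):

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard		@ step 1
	ldr	r3, [r3, #0]				@ step 2
	str	r3, [r7, #12]				@ step 3

   If PC is at the movw and the loaded address resolves to the
   __stack_chk_guard symbol, the function above returns the address
   just past the str.  */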
1254 /* Advance the PC across any function entry prologue instructions to
1255 reach some "real" code.
1257 The APCS (ARM Procedure Call Standard) defines the following prologue:
1261 [stmfd sp!, {a1,a2,a3,a4}]
1262 stmfd sp!, {...,fp,ip,lr,pc}
1263 [stfe f7, [sp, #-12]!]
1264 [stfe f6, [sp, #-12]!]
1265 [stfe f5, [sp, #-12]!]
1266 [stfe f4, [sp, #-12]!]
1267 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1270 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1272 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1274 CORE_ADDR func_addr, limit_pc;
1276 /* See if we can determine the end of the prologue via the symbol table.
1277 If so, then return either PC, or the PC after the prologue, whichever is greater. */
1279 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1281 CORE_ADDR post_prologue_pc
1282 = skip_prologue_using_sal (gdbarch, func_addr);
1283 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1285 if (post_prologue_pc)
1287 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1290 /* GCC always emits a line note before the prologue and another
1291 one after, even if the two are at the same address or on the
1292 same line. Take advantage of this so that we do not need to
1293 know every instruction that might appear in the prologue. We
1294 will have producer information for most binaries; if it is
1295 missing (e.g. for -gstabs), assume the GNU tools. */
1296 if (post_prologue_pc
1298 || COMPUNIT_PRODUCER (cust) == NULL
1299 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1300 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1301 return post_prologue_pc;
1303 if (post_prologue_pc != 0)
1305 CORE_ADDR analyzed_limit;
1307 /* For non-GCC compilers, make sure the entire line is an
1308 acceptable prologue; GDB will round this function's
1309 return value up to the end of the following line so we
1310 cannot skip just part of a line (and we do not want to).
1312 RealView does not treat the prologue specially, but does
1313 associate prologue code with the opening brace; so this
1314 lets us skip the first line if we think it is the opening brace. */
1316 if (arm_pc_is_thumb (gdbarch, func_addr))
1317 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1318 post_prologue_pc, NULL);
1320 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1321 post_prologue_pc, NULL);
1323 if (analyzed_limit != post_prologue_pc)
1326 return post_prologue_pc;
1330 /* Can't determine the prologue from the symbol table; we need to examine the instructions. */
1333 /* Find an upper limit on the function prologue using the debug
1334 information. If the debug information could not be used to provide
1335 that bound, then use an arbitrary large number as the upper bound. */
1336 /* Like arm_scan_prologue, stop no later than pc + 64. */
1337 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1339 limit_pc = pc + 64; /* Magic. */
1342 /* Check if this is Thumb code. */
1343 if (arm_pc_is_thumb (gdbarch, pc))
1344 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1346 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1350 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1351 This function decodes a Thumb function prologue to determine:
1352 1) the size of the stack frame
1353 2) which registers are saved on it
1354 3) the offsets of saved regs
1355 4) the offset from the stack pointer to the frame pointer
1357 A typical Thumb function prologue would create this stack frame
1358 (offsets relative to FP)
1359 old SP -> 24 stack parameters
1362 R7 -> 0 local variables (16 bytes)
1363 SP -> -12 additional stack space (12 bytes)
1364 The frame size would thus be 36 bytes, and the frame offset would be
1365 12 bytes. The frame register is R7.
1367 The comments for thumb_skip_prolog() describe the algorithm we use
1368 to detect the end of the prologue. */
1372 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1373 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1375 CORE_ADDR prologue_start;
1376 CORE_ADDR prologue_end;
1378 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1381 /* See comment in arm_scan_prologue for an explanation of
1383 if (prologue_end > prologue_start + 64)
1385 prologue_end = prologue_start + 64;
1389 /* We're in the boondocks: we have no idea where the start of the function is. */
1393 prologue_end = min (prologue_end, prev_pc);
1395 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1399 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0 otherwise. */
1402 arm_instruction_restores_sp (unsigned int insn)
1404 if (bits (insn, 28, 31) != INST_NV)
1406 if ((insn & 0x0df0f000) == 0x0080d000
1407 /* ADD SP (register or immediate). */
1408 || (insn & 0x0df0f000) == 0x0040d000
1409 /* SUB SP (register or immediate). */
1410 || (insn & 0x0ffffff0) == 0x01a0d000
1412 || (insn & 0x0fff0000) == 0x08bd0000
1414 || (insn & 0x0fff0000) == 0x049d0000)
1415 /* POP of a single register. */
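/* Worked examples (illustrative): 0xe8bd8ff0 ("ldmfd sp!, {r4-r11, pc}")
   matches the 0x08bd0000 pattern, 0xe28dd018 ("add sp, sp, #24")
   matches the 0x0080d000 pattern, and 0xe92d4800
   ("stmfd sp!, {fp, lr}") is correctly rejected.  */
#if 0
static void
example_arm_restores_sp (void)
{
  gdb_assert (arm_instruction_restores_sp (0xe8bd8ff0));
  gdb_assert (arm_instruction_restores_sp (0xe28dd018));
  gdb_assert (!arm_instruction_restores_sp (0xe92d4800));
}
#endif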
1422 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1423 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1424 fill it in. Return the first address not recognized as a prologue instruction.
1427 We recognize all the instructions typically found in ARM prologues,
1428 plus harmless instructions which can be skipped (either for analysis
1429 purposes, or a more restrictive set that can be skipped when finding
1430 the end of the prologue). */
1433 arm_analyze_prologue (struct gdbarch *gdbarch,
1434 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1435 struct arm_prologue_cache *cache)
1437 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1438 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1440 CORE_ADDR offset, current_pc;
1441 pv_t regs[ARM_FPS_REGNUM];
1442 struct pv_area *stack;
1443 struct cleanup *back_to;
1444 CORE_ADDR unrecognized_pc = 0;
1446 /* Search the prologue looking for instructions that set up the
1447 frame pointer, adjust the stack pointer, and save registers.
1449 Be careful, however, and if it doesn't look like a prologue,
1450 don't try to scan it. If, for instance, a frameless function
1451 begins with stmfd sp!, then we will tell ourselves there is
1452 a frame, which will confuse stack traceback, as well as "finish"
1453 and other operations that rely on a knowledge of the stack traceback. */
1456 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1457 regs[regno] = pv_register (regno, 0);
1458 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1459 back_to = make_cleanup_free_pv_area (stack);
1461 for (current_pc = prologue_start;
1462 current_pc < prologue_end;
1466 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1468 if (insn == 0xe1a0c00d) /* mov ip, sp */
1470 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1473 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1474 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1476 unsigned imm = insn & 0xff; /* immediate value */
1477 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1478 int rd = bits (insn, 12, 15);
1479 imm = (imm >> rot) | (imm << (32 - rot));
1480 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1483 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1484 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1486 unsigned imm = insn & 0xff; /* immediate value */
1487 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1488 int rd = bits (insn, 12, 15);
1489 imm = (imm >> rot) | (imm << (32 - rot));
1490 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1493 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1496 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1498 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1499 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1500 regs[bits (insn, 12, 15)]);
1503 else if ((insn & 0xffff0000) == 0xe92d0000)
1504 /* stmfd sp!, {..., fp, ip, lr, pc}
1506 stmfd sp!, {a1, a2, a3, a4} */
1508 int mask = insn & 0xffff;
1510 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1513 /* Calculate offsets of saved registers. */
1514 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1515 if (mask & (1 << regno))
1518 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1519 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1522 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1523 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1524 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1526 /* No need to add this to saved_regs -- it's just an arg reg. */
1529 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1530 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1531 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1533 /* No need to add this to saved_regs -- it's just an arg reg. */
1536 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1538 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1540 /* No need to add this to saved_regs -- it's just arg regs. */
1543 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1545 unsigned imm = insn & 0xff; /* immediate value */
1546 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1547 imm = (imm >> rot) | (imm << (32 - rot));
1548 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1550 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1552 unsigned imm = insn & 0xff; /* immediate value */
1553 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1554 imm = (imm >> rot) | (imm << (32 - rot));
1555 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1557 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1559 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1561 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1564 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1565 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1566 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1568 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1570 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1572 int n_saved_fp_regs;
1573 unsigned int fp_start_reg, fp_bound_reg;
1575 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1578 if ((insn & 0x800) == 0x800) /* N0 is set */
1580 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1581 n_saved_fp_regs = 3;
1583 n_saved_fp_regs = 1;
1587 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1588 n_saved_fp_regs = 2;
1590 n_saved_fp_regs = 4;
1593 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1594 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1595 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1597 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1598 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1599 regs[fp_start_reg++]);
1602 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1604 /* Allow some special function calls when skipping the
1605 prologue; GCC generates these before storing arguments to the stack. */
1607 CORE_ADDR dest = BranchDest (current_pc, insn);
1609 if (skip_prologue_function (gdbarch, dest, 0))
1614 else if ((insn & 0xf0000000) != 0xe0000000)
1615 break; /* Condition not true, exit early. */
1616 else if (arm_instruction_changes_pc (insn))
1617 /* Don't scan past anything that might change control flow. */
1619 else if (arm_instruction_restores_sp (insn))
1621 /* Don't scan past the epilogue. */
1624 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1625 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1626 /* Ignore block loads from the stack, potentially copying
1627 parameters from memory. */
1629 else if ((insn & 0xfc500000) == 0xe4100000
1630 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1631 /* Similarly ignore single loads from the stack. */
1633 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1634 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1635 register instead of the stack. */
1639 /* The optimizer might shove anything into the prologue. If
1640 we are building up the cache (cache != NULL) from scanning the prologue,
1641 we just skip what we don't recognize and scan further to
1642 make the cache as complete as possible. However, if we are skipping the
1643 prologue, we stop immediately at the first unrecognized instruction. */
1645 unrecognized_pc = current_pc;
1653 if (unrecognized_pc == 0)
1654 unrecognized_pc = current_pc;
1658 int framereg, framesize;
1660 /* The frame size is just the distance from the frame register
1661 to the original stack pointer. */
1662 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1664 /* Frame pointer is fp. */
1665 framereg = ARM_FP_REGNUM;
1666 framesize = -regs[ARM_FP_REGNUM].k;
1670 /* Try the stack pointer... this is a bit desperate. */
1671 framereg = ARM_SP_REGNUM;
1672 framesize = -regs[ARM_SP_REGNUM].k;
1675 cache->framereg = framereg;
1676 cache->framesize = framesize;
1678 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1679 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1680 cache->saved_regs[regno].addr = offset;
1684 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1685 paddress (gdbarch, unrecognized_pc));
1687 do_cleanups (back_to);
1688 return unrecognized_pc;
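/* Worked example (illustrative): for the classic APCS prologue shown
   in the comment before arm_skip_prologue,

	0xe1a0c00d	mov	ip, sp
	0xe92dd800	stmfd	sp!, {fp, ip, lr, pc}
	0xe24cb004	sub	fp, ip, #4
	0xe24dd010	sub	sp, sp, #16

   the scan leaves fp equal to the entry SP minus 4, so a supplied
   CACHE records framereg == ARM_FP_REGNUM and framesize == 4, with
   the caller's fp, ip, lr and pc saved at offsets -16, -12, -8 and -4
   from the entry SP; arm_make_prologue_cache then reconstructs
   prev_sp as the unwound fp plus 4.  */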
1692 arm_scan_prologue (struct frame_info *this_frame,
1693 struct arm_prologue_cache *cache)
1695 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1696 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1698 CORE_ADDR prologue_start, prologue_end, current_pc;
1699 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1700 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1701 pv_t regs[ARM_FPS_REGNUM];
1702 struct pv_area *stack;
1703 struct cleanup *back_to;
1706 /* Assume there is no frame until proven otherwise. */
1707 cache->framereg = ARM_SP_REGNUM;
1708 cache->framesize = 0;
1710 /* Check for Thumb prologue. */
1711 if (arm_frame_is_thumb (this_frame))
1713 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1717 /* Find the function prologue. If we can't find the function in
1718 the symbol table, peek in the stack frame to find the PC. */
1719 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1722 /* One way to find the end of the prologue (which works well
1723 for unoptimized code) is to do the following:
1725 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1728 prologue_end = prev_pc;
1729 else if (sal.end < prologue_end)
1730 prologue_end = sal.end;
1732 This mechanism is very accurate so long as the optimizer
1733 doesn't move any instructions from the function body into the
1734 prologue. If this happens, sal.end will be the last
1735 instruction in the first hunk of prologue code just before
1736 the first instruction that the scheduler has moved from
1737 the body to the prologue.
1739 In order to make sure that we scan all of the prologue
1740 instructions, we use a slightly less accurate mechanism which
1741 may scan more than necessary. To help compensate for this
1742 lack of accuracy, the prologue scanning loop below contains
1743 several clauses which'll cause the loop to terminate early if
1744 an implausible prologue instruction is encountered.
1750 is a suitable endpoint since it accounts for the largest
1751 possible prologue plus up to five instructions inserted by the scheduler. */
1754 if (prologue_end > prologue_start + 64)
1756 prologue_end = prologue_start + 64; /* See above. */
1761 /* We have no symbol information. Our only option is to assume this
1762 function has a standard stack frame and the normal frame register.
1763 Then, we can find the value of our frame pointer on entrance to
1764 the callee (or at the present moment if this is the innermost frame).
1765 The value stored there should be the address of the stmfd + 8. */
1766 CORE_ADDR frame_loc;
1767 LONGEST return_value;
1769 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1770 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1774 prologue_start = gdbarch_addr_bits_remove
1775 (gdbarch, return_value) - 8;
1776 prologue_end = prologue_start + 64; /* See above. */
1780 if (prev_pc < prologue_end)
1781 prologue_end = prev_pc;
1783 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1786 static struct arm_prologue_cache *
1787 arm_make_prologue_cache (struct frame_info *this_frame)
1790 struct arm_prologue_cache *cache;
1791 CORE_ADDR unwound_fp;
1793 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1794 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1796 arm_scan_prologue (this_frame, cache);
1798 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1799 if (unwound_fp == 0)
1802 cache->prev_sp = unwound_fp + cache->framesize;
1804 /* Calculate actual addresses of saved registers using offsets
1805 determined by arm_scan_prologue. */
1806 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1807 if (trad_frame_addr_p (cache->saved_regs, reg))
1808 cache->saved_regs[reg].addr += cache->prev_sp;
1813 /* Implementation of the stop_reason hook for arm_prologue frames. */
1815 static enum unwind_stop_reason
1816 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1819 struct arm_prologue_cache *cache;
1822 if (*this_cache == NULL)
1823 *this_cache = arm_make_prologue_cache (this_frame);
1824 cache = (struct arm_prologue_cache *) *this_cache;
1826 /* This is meant to halt the backtrace at "_start". */
1827 pc = get_frame_pc (this_frame);
1828 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1829 return UNWIND_OUTERMOST;
1831 /* If we've hit a wall, stop. */
1832 if (cache->prev_sp == 0)
1833 return UNWIND_OUTERMOST;
1835 return UNWIND_NO_REASON;
1838 /* Our frame ID for a normal frame is the current function's starting PC
1839 and the caller's SP when we were called. */
1842 arm_prologue_this_id (struct frame_info *this_frame,
1844 struct frame_id *this_id)
1846 struct arm_prologue_cache *cache;
1850 if (*this_cache == NULL)
1851 *this_cache = arm_make_prologue_cache (this_frame);
1852 cache = (struct arm_prologue_cache *) *this_cache;
1854 /* Use function start address as part of the frame ID. If we cannot
1855 identify the start address (due to missing symbol information),
1856 fall back to just using the current PC. */
1857 pc = get_frame_pc (this_frame);
1858 func = get_frame_func (this_frame);
1862 id = frame_id_build (cache->prev_sp, func);
1866 static struct value *
1867 arm_prologue_prev_register (struct frame_info *this_frame,
1871 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1872 struct arm_prologue_cache *cache;
1874 if (*this_cache == NULL)
1875 *this_cache = arm_make_prologue_cache (this_frame);
1876 cache = (struct arm_prologue_cache *) *this_cache;
1878 /* If we are asked to unwind the PC, then we need to return the LR
1879 instead. The prologue may save PC, but it will point into this
1880 frame's prologue, not the next frame's resume location. Also
1881 strip the saved T bit. A valid LR may have the low bit set, but
1882 a valid PC never does. */
1883 if (prev_regnum == ARM_PC_REGNUM)
1887 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1888 return frame_unwind_got_constant (this_frame, prev_regnum,
1889 arm_addr_bits_remove (gdbarch, lr));
1892 /* SP is generally not saved to the stack, but this frame is
1893 identified by the next frame's stack pointer at the time of the call.
1894 The value was already reconstructed into PREV_SP. */
1895 if (prev_regnum == ARM_SP_REGNUM)
1896 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1898 /* The CPSR may have been changed by the call instruction and by the
1899 called function. The only bit we can reconstruct is the T bit,
1900 by checking the low bit of LR as of the call. This is a reliable
1901 indicator of Thumb-ness except for some ARM v4T pre-interworking
1902 Thumb code, which could get away with a clear low bit as long as
1903 the called function did not use bx. Guess that all other
1904 bits are unchanged; the condition flags are presumably lost,
1905 but the processor status is likely valid. */
1906 if (prev_regnum == ARM_PS_REGNUM)
1909 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1911 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1912 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1913 if (IS_THUMB_ADDR (lr))
1917 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1920 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1924 struct frame_unwind arm_prologue_unwind = {
1926 arm_prologue_unwind_stop_reason,
1927 arm_prologue_this_id,
1928 arm_prologue_prev_register,
1930 default_frame_sniffer
1933 /* Maintain a list of ARM exception table entries per objfile, similar to the
1934 list of mapping symbols. We only cache entries for standard ARM-defined
1935 personality routines; the cache will contain only the frame unwinding
1936 instructions associated with the entry (not the descriptors). */
1938 static const struct objfile_data *arm_exidx_data_key;
1940 struct arm_exidx_entry
1945 typedef struct arm_exidx_entry arm_exidx_entry_s;
1946 DEF_VEC_O(arm_exidx_entry_s);
1948 struct arm_exidx_data
1950 VEC(arm_exidx_entry_s) **section_maps;
1954 arm_exidx_data_free (struct objfile *objfile, void *arg)
1956 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1959 for (i = 0; i < objfile->obfd->section_count; i++)
1960 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1964 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1965 const struct arm_exidx_entry *rhs)
1967 return lhs->addr < rhs->addr;
1970 static struct obj_section *
1971 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1973 struct obj_section *osect;
1975 ALL_OBJFILE_OSECTIONS (objfile, osect)
1976 if (bfd_get_section_flags (objfile->obfd,
1977 osect->the_bfd_section) & SEC_ALLOC)
1979 bfd_vma start, size;
1980 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1981 size = bfd_get_section_size (osect->the_bfd_section);
1983 if (start <= vma && vma < start + size)
1990 /* Parse contents of exception table and exception index sections
1991 of OBJFILE, and fill in the exception table entry cache.
1993 For each entry that refers to a standard ARM-defined personality
1994 routine, extract the frame unwinding instructions (from either
1995 the index or the table section). The unwinding instructions are normalized by:
1997 - extracting them from the rest of the table data
1998 - converting to host endianness
1999 - appending the implicit 0xb0 ("Finish") code
2001 The extracted and normalized instructions are stored for later
2002 retrieval by the arm_find_exidx_entry routine. */
2005 arm_exidx_new_objfile (struct objfile *objfile)
2007 struct cleanup *cleanups;
2008 struct arm_exidx_data *data;
2009 asection *exidx, *extab;
2010 bfd_vma exidx_vma = 0, extab_vma = 0;
2011 bfd_size_type exidx_size = 0, extab_size = 0;
2012 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2015 /* If we've already touched this file, do nothing. */
2016 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2018 cleanups = make_cleanup (null_cleanup, NULL);
2020 /* Read contents of exception table and index. */
2021 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2024 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2025 exidx_size = bfd_get_section_size (exidx);
2026 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2027 make_cleanup (xfree, exidx_data);
2029 if (!bfd_get_section_contents (objfile->obfd, exidx,
2030 exidx_data, 0, exidx_size))
2032 do_cleanups (cleanups);
2037 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2040 extab_vma = bfd_section_vma (objfile->obfd, extab);
2041 extab_size = bfd_get_section_size (extab);
2042 extab_data = (gdb_byte *) xmalloc (extab_size);
2043 make_cleanup (xfree, extab_data);
2045 if (!bfd_get_section_contents (objfile->obfd, extab,
2046 extab_data, 0, extab_size))
2048 do_cleanups (cleanups);
2053 /* Allocate exception table data structure. */
2054 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2055 set_objfile_data (objfile, arm_exidx_data_key, data);
2056 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2057 objfile->obfd->section_count,
2058 VEC(arm_exidx_entry_s) *);
2060 /* Fill in exception table. */
2061 for (i = 0; i < exidx_size / 8; i++)
2063 struct arm_exidx_entry new_exidx_entry;
2064 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2065 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2066 bfd_vma addr = 0, word = 0;
2067 int n_bytes = 0, n_words = 0;
2068 struct obj_section *sec;
2069 gdb_byte *entry = NULL;
2071 /* Extract address of start of function. */
2072 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2073 idx += exidx_vma + i * 8;
2075 /* Find section containing function and compute section offset. */
2076 sec = arm_obj_section_from_vma (objfile, idx);
2079 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2081 /* Determine address of exception table entry. */
2084 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2086 else if ((val & 0xff000000) == 0x80000000)
2088 /* Exception table entry embedded in .ARM.exidx
2089 -- must be short form. */
2093 else if (!(val & 0x80000000))
2095 /* Exception table entry in .ARM.extab. */
2096 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2097 addr += exidx_vma + i * 8 + 4;
2099 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2101 word = bfd_h_get_32 (objfile->obfd,
2102 extab_data + addr - extab_vma);
2105 if ((word & 0xff000000) == 0x80000000)
2110 else if ((word & 0xff000000) == 0x81000000
2111 || (word & 0xff000000) == 0x82000000)
2115 n_words = ((word >> 16) & 0xff);
2117 else if (!(word & 0x80000000))
2120 struct obj_section *pers_sec;
2121 int gnu_personality = 0;
2123 /* Custom personality routine. */
2124 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2125 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2127 /* Check whether we've got one of the variants of the
2128 GNU personality routines. */
2129 pers_sec = arm_obj_section_from_vma (objfile, pers);
2132 static const char *personality[] =
2134 "__gcc_personality_v0",
2135 "__gxx_personality_v0",
2136 "__gcj_personality_v0",
2137 "__gnu_objc_personality_v0",
2141 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2144 for (k = 0; personality[k]; k++)
2145 if (lookup_minimal_symbol_by_pc_name
2146 (pc, personality[k], objfile))
2148 gnu_personality = 1;
2153 /* If so, the next word contains a word count in the high
2154 byte, followed by the same unwind instructions as the
2155 pre-defined forms. */
2157 && addr + 4 <= extab_vma + extab_size)
2159 word = bfd_h_get_32 (objfile->obfd,
2160 extab_data + addr - extab_vma);
2163 n_words = ((word >> 24) & 0xff);
2169 /* Sanity check address. */
2171 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2172 n_words = n_bytes = 0;
2174 /* The unwind instructions reside in WORD (only the N_BYTES least
2175 significant bytes are valid), followed by N_WORDS words in the
2176 extab section starting at ADDR. */
2177 if (n_bytes || n_words)
2180 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2181 n_bytes + n_words * 4 + 1);
2184 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2188 word = bfd_h_get_32 (objfile->obfd,
2189 extab_data + addr - extab_vma);
2192 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2193 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2194 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2195 *p++ = (gdb_byte) (word & 0xff);
2198 /* Implied "Finish" to terminate the list. */
2202 /* Push the new entry onto the vector. Entries are guaranteed to
2203 appear in order of increasing addresses. */
2204 new_exidx_entry.addr = idx;
2205 new_exidx_entry.entry = entry;
2206 VEC_safe_push (arm_exidx_entry_s,
2207 data->section_maps[sec->the_bfd_section->index],
2211 do_cleanups (cleanups);
2214 /* Search for the exception table entry covering MEMADDR. If one is found,
2215 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2216 set *START to the start of the region covered by this entry. */
2219 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2221 struct obj_section *sec;
2223 sec = find_pc_section (memaddr);
2226 struct arm_exidx_data *data;
2227 VEC(arm_exidx_entry_s) *map;
2228 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2231 data = ((struct arm_exidx_data *)
2232 objfile_data (sec->objfile, arm_exidx_data_key));
2235 map = data->section_maps[sec->the_bfd_section->index];
2236 if (!VEC_empty (arm_exidx_entry_s, map))
2238 struct arm_exidx_entry *map_sym;
2240 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2241 arm_compare_exidx_entries);
2243 /* VEC_lower_bound finds the earliest ordered insertion
2244 point. If the following symbol starts at this exact
2245 address, we use that; otherwise, the preceding
2246 exception table entry covers this address. */
2247 if (idx < VEC_length (arm_exidx_entry_s, map))
2249 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2250 if (map_sym->addr == map_key.addr)
2253 *start = map_sym->addr + obj_section_addr (sec);
2254 return map_sym->entry;
2260 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2262 *start = map_sym->addr + obj_section_addr (sec);
2263 return map_sym->entry;
2272 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2273 instruction list from the ARM exception table entry ENTRY, allocate and
2274 return a prologue cache structure describing how to unwind this frame.
2276 Return NULL if the unwinding instruction list contains a "spare",
2277 "reserved" or "refuse to unwind" instruction as defined in section
2278 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2279 for the ARM Architecture" document. */
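/* As an illustration of the normalized instruction lists handled below: a
prologue of "push {r4, lr}; sub sp, #16" is typically described by the
sequence 0x03 (vsp += 16), 0xa8 (pop {r4, lr}), 0xb0 (finish). Walking that
list leaves VSP at the caller's stack pointer and records the stack
addresses of the saved r4 and lr. */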
2281 static struct arm_prologue_cache *
2282 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2287 struct arm_prologue_cache *cache;
2288 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2289 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2295 /* Whenever we reload SP, we have to retrieve its actual
2296 value in the current frame. */
2299 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2301 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2302 vsp = get_frame_register_unsigned (this_frame, reg);
2306 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2307 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2313 /* Decode next unwind instruction. */
2316 if ((insn & 0xc0) == 0)
2318 int offset = insn & 0x3f;
2319 vsp += (offset << 2) + 4;
2321 else if ((insn & 0xc0) == 0x40)
2323 int offset = insn & 0x3f;
2324 vsp -= (offset << 2) + 4;
2326 else if ((insn & 0xf0) == 0x80)
2328 int mask = ((insn & 0xf) << 8) | *entry++;
2331 /* The special case of an all-zero mask identifies
2332 "Refuse to unwind". We return NULL to fall back
2333 to the prologue analyzer. */
2337 /* Pop registers r4..r15 under mask. */
2338 for (i = 0; i < 12; i++)
2339 if (mask & (1 << i))
2341 cache->saved_regs[4 + i].addr = vsp;
2345 /* Special-case popping SP -- we need to reload vsp. */
2346 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2349 else if ((insn & 0xf0) == 0x90)
2351 int reg = insn & 0xf;
2353 /* Reserved cases. */
2354 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2357 /* Set SP from another register and mark VSP for reload. */
2358 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2361 else if ((insn & 0xf0) == 0xa0)
2363 int count = insn & 0x7;
2364 int pop_lr = (insn & 0x8) != 0;
2367 /* Pop r4..r[4+count]. */
2368 for (i = 0; i <= count; i++)
2370 cache->saved_regs[4 + i].addr = vsp;
2374 /* If indicated by flag, pop LR as well. */
2377 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2381 else if (insn == 0xb0)
2383 /* We could only have updated PC by popping into it; if so, it
2384 will show up as an address. Otherwise, copy LR into PC. */
2385 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2386 cache->saved_regs[ARM_PC_REGNUM]
2387 = cache->saved_regs[ARM_LR_REGNUM];
2392 else if (insn == 0xb1)
2394 int mask = *entry++;
2397 /* An all-zero mask or a mask >= 16 is "spare". */
2398 if (mask == 0 || mask >= 16)
2401 /* Pop r0..r3 under mask. */
2402 for (i = 0; i < 4; i++)
2403 if (mask & (1 << i))
2405 cache->saved_regs[i].addr = vsp;
2409 else if (insn == 0xb2)
2411 ULONGEST offset = 0;
2416 offset |= (*entry & 0x7f) << shift;
2419 while (*entry++ & 0x80);
2421 vsp += 0x204 + (offset << 2);
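/* For example, the encoding 0xb2 0x08 decodes to an offset of 8 and
therefore adds 0x204 + (8 << 2) = 0x224 bytes to VSP. */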
2423 else if (insn == 0xb3)
2425 int start = *entry >> 4;
2426 int count = (*entry++) & 0xf;
2429 /* Only registers D0..D15 are valid here. */
2430 if (start + count >= 16)
2433 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2434 for (i = 0; i <= count; i++)
2436 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2440 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2443 else if ((insn & 0xf8) == 0xb8)
2445 int count = insn & 0x7;
2448 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2449 for (i = 0; i <= count; i++)
2451 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2455 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2458 else if (insn == 0xc6)
2460 int start = *entry >> 4;
2461 int count = (*entry++) & 0xf;
2464 /* Only registers WR0..WR15 are valid. */
2465 if (start + count >= 16)
2468 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2469 for (i = 0; i <= count; i++)
2471 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2475 else if (insn == 0xc7)
2477 int mask = *entry++;
2480 /* An all-zero mask or a mask >= 16 is "spare". */
2481 if (mask == 0 || mask >= 16)
2484 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2485 for (i = 0; i < 4; i++)
2486 if (mask & (1 << i))
2488 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2492 else if ((insn & 0xf8) == 0xc0)
2494 int count = insn & 0x7;
2497 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2498 for (i = 0; i <= count; i++)
2500 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2504 else if (insn == 0xc8)
2506 int start = *entry >> 4;
2507 int count = (*entry++) & 0xf;
2510 /* Only registers D0..D31 are valid. */
2511 if (start + count >= 16)
2514 /* Pop VFP double-precision registers
2515 D[16+start]..D[16+start+count]. */
2516 for (i = 0; i <= count; i++)
2518 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2522 else if (insn == 0xc9)
2524 int start = *entry >> 4;
2525 int count = (*entry++) & 0xf;
2528 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2529 for (i = 0; i <= count; i++)
2531 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2535 else if ((insn & 0xf8) == 0xd0)
2537 int count = insn & 0x7;
2540 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2541 for (i = 0; i <= count; i++)
2543 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2549 /* Everything else is "spare". */
2554 /* If we restore SP from a register, assume this was the frame register.
2555 Otherwise just fall back to SP as frame register. */
2556 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2557 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2559 cache->framereg = ARM_SP_REGNUM;
2561 /* Determine offset to previous frame. */
2563 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2565 /* We already got the previous SP. */
2566 cache->prev_sp = vsp;
2571 /* Unwinding via ARM exception table entries. Note that the sniffer
2572 already computes a filled-in prologue cache, which is then used
2573 with the same arm_prologue_this_id and arm_prologue_prev_register
2574 routines also used for prologue-parsing based unwinding. */
2577 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2578 struct frame_info *this_frame,
2579 void **this_prologue_cache)
2581 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2582 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2583 CORE_ADDR addr_in_block, exidx_region, func_start;
2584 struct arm_prologue_cache *cache;
2587 /* See if we have an ARM exception table entry covering this address. */
2588 addr_in_block = get_frame_address_in_block (this_frame);
2589 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2593 /* The ARM exception table does not describe unwind information
2594 for arbitrary PC values, but is guaranteed to be correct only
2595 at call sites. We have to decide here whether we want to use
2596 ARM exception table information for this frame, or fall back
2597 to using prologue parsing. (Note that if we have DWARF CFI,
2598 this sniffer isn't even called -- CFI is always preferred.)
2600 Before we make this decision, however, we check whether we
2601 actually have *symbol* information for the current frame.
2602 If not, prologue parsing would not work anyway, so we might
2603 as well use the exception table and hope for the best. */
2604 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2608 /* If the next frame is "normal", we are at a call site in this
2609 frame, so exception information is guaranteed to be valid. */
2610 if (get_next_frame (this_frame)
2611 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2614 /* We also assume exception information is valid if we're currently
2615 blocked in a system call. The system library is supposed to
2616 ensure this, so that e.g. pthread cancellation works. */
2617 if (arm_frame_is_thumb (this_frame))
2621 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2622 byte_order_for_code, &insn)
2623 && (insn & 0xff00) == 0xdf00 /* svc */)
2630 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2631 byte_order_for_code, &insn)
2632 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2636 /* Bail out if we don't know that exception information is valid. */
2640 /* The ARM exception index does not mark the *end* of the region
2641 covered by the entry, and some functions will not have any entry.
2642 To correctly recognize the end of the covered region, the linker
2643 should have inserted dummy records with a CANTUNWIND marker.
2645 Unfortunately, current versions of GNU ld do not reliably do
2646 this, and thus we may have found an incorrect entry above.
2647 As a (temporary) sanity check, we only use the entry if it
2648 lies *within* the bounds of the function. Note that this check
2649 might reject perfectly valid entries that just happen to cover
2650 multiple functions; therefore this check ought to be removed
2651 once the linker is fixed. */
2652 if (func_start > exidx_region)
2656 /* Decode the list of unwinding instructions into a prologue cache.
2657 Note that this may fail due to e.g. a "refuse to unwind" code. */
2658 cache = arm_exidx_fill_cache (this_frame, entry);
2662 *this_prologue_cache = cache;
2666 struct frame_unwind arm_exidx_unwind = {
2668 default_frame_unwind_stop_reason,
2669 arm_prologue_this_id,
2670 arm_prologue_prev_register,
2672 arm_exidx_unwind_sniffer
2675 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2676 trampoline, return the target PC. Otherwise return 0.
2678 void call0a (char c, short s, int i, long l) {}
2682 (*pointer_to_call0a) (c, s, i, l);
2685 Instead of calling a stub library function _call_via_xx (xx is
2686 the register name), GCC may inline the trampoline in the object
2687 file as below (register r2 has the address of call0a).
2690 .type main, %function
2699 The trampoline 'bx r2' doesn't belong to main. */
2702 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2704 /* The heuristic for recognizing such a trampoline is that FRAME is
2705 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2706 if (arm_frame_is_thumb (frame))
2710 if (target_read_memory (pc, buf, 2) == 0)
2712 struct gdbarch *gdbarch = get_frame_arch (frame);
2713 enum bfd_endian byte_order_for_code
2714 = gdbarch_byte_order_for_code (gdbarch);
2716 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2718 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2721 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2723 /* Clear the LSB so that gdb core sets step-resume
2724 breakpoint at the right address. */
2725 return UNMAKE_THUMB_ADDR (dest);
2733 static struct arm_prologue_cache *
2734 arm_make_stub_cache (struct frame_info *this_frame)
2736 struct arm_prologue_cache *cache;
2738 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2739 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2741 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2746 /* Our frame ID for a stub frame is the current SP and LR. */
2749 arm_stub_this_id (struct frame_info *this_frame,
2751 struct frame_id *this_id)
2753 struct arm_prologue_cache *cache;
2755 if (*this_cache == NULL)
2756 *this_cache = arm_make_stub_cache (this_frame);
2757 cache = (struct arm_prologue_cache *) *this_cache;
2759 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2763 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2764 struct frame_info *this_frame,
2765 void **this_prologue_cache)
2767 CORE_ADDR addr_in_block;
2769 CORE_ADDR pc, start_addr;
2772 addr_in_block = get_frame_address_in_block (this_frame);
2773 pc = get_frame_pc (this_frame);
2774 if (in_plt_section (addr_in_block)
2775 /* We also use the stub unwinder if the target memory is unreadable
2776 to avoid having the prologue unwinder try to read it. */
2777 || target_read_memory (pc, dummy, 4) != 0)
2780 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2781 && arm_skip_bx_reg (this_frame, pc) != 0)
2787 struct frame_unwind arm_stub_unwind = {
2789 default_frame_unwind_stop_reason,
2791 arm_prologue_prev_register,
2793 arm_stub_unwind_sniffer
2796 /* Store, into CACHE->saved_regs, the addresses of the registers saved
2797 by the frame described by THIS_FRAME; the filled-in CACHE is returned. */
2800 static struct arm_prologue_cache *
2801 arm_m_exception_cache (struct frame_info *this_frame)
2803 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2805 struct arm_prologue_cache *cache;
2806 CORE_ADDR unwound_sp;
2809 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2810 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2812 unwound_sp = get_frame_register_unsigned (this_frame,
2815 /* The hardware saves eight 32-bit words, comprising xPSR,
2816 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2817 "B1.5.6 Exception entry behavior" in
2818 "ARMv7-M Architecture Reference Manual". */
2819 cache->saved_regs[0].addr = unwound_sp;
2820 cache->saved_regs[1].addr = unwound_sp + 4;
2821 cache->saved_regs[2].addr = unwound_sp + 8;
2822 cache->saved_regs[3].addr = unwound_sp + 12;
2823 cache->saved_regs[12].addr = unwound_sp + 16;
2824 cache->saved_regs[14].addr = unwound_sp + 20;
2825 cache->saved_regs[15].addr = unwound_sp + 24;
2826 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2828 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2829 aligner between the top of the 32-byte stack frame and the
2830 previous context's stack pointer. */
2831 cache->prev_sp = unwound_sp + 32;
2832 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2833 && (xpsr & (1 << 9)) != 0)
2834 cache->prev_sp += 4;
2839 /* Implementation of function hook 'this_id' in
2840 'struct frame_unwind'. */
2843 arm_m_exception_this_id (struct frame_info *this_frame,
2845 struct frame_id *this_id)
2847 struct arm_prologue_cache *cache;
2849 if (*this_cache == NULL)
2850 *this_cache = arm_m_exception_cache (this_frame);
2851 cache = (struct arm_prologue_cache *) *this_cache;
2853 /* Our frame ID for an exception frame is built from the unwound SP and the PC. */
2854 *this_id = frame_id_build (cache->prev_sp,
2855 get_frame_pc (this_frame));
2858 /* Implementation of function hook 'prev_register' in
2859 'struct frame_unwind'. */
2861 static struct value *
2862 arm_m_exception_prev_register (struct frame_info *this_frame,
2866 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2867 struct arm_prologue_cache *cache;
2869 if (*this_cache == NULL)
2870 *this_cache = arm_m_exception_cache (this_frame);
2871 cache = (struct arm_prologue_cache *) *this_cache;
2873 /* The value was already reconstructed into PREV_SP. */
2874 if (prev_regnum == ARM_SP_REGNUM)
2875 return frame_unwind_got_constant (this_frame, prev_regnum,
2878 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2882 /* Implementation of function hook 'sniffer' in
2883 'struct frame_unwind'. */
2886 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2887 struct frame_info *this_frame,
2888 void **this_prologue_cache)
2890 CORE_ADDR this_pc = get_frame_pc (this_frame);
2892 /* No need to check is_m; this sniffer is only registered for
2893 M-profile architectures. */
2895 /* Exception frames return to one of these magic PCs. Other values
2896 are not defined as of v7-M. See details in "B1.5.8 Exception
2897 return behavior" in "ARMv7-M Architecture Reference Manual". */
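/* For reference, these EXC_RETURN values mean: 0xfffffff1 -- return to
Handler mode using the main stack; 0xfffffff9 -- return to Thread mode
using the main stack; 0xfffffffd -- return to Thread mode using the
process stack. */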
2898 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2899 || this_pc == 0xfffffffd)
2905 /* Frame unwinder for M-profile exceptions. */
2907 struct frame_unwind arm_m_exception_unwind =
2910 default_frame_unwind_stop_reason,
2911 arm_m_exception_this_id,
2912 arm_m_exception_prev_register,
2914 arm_m_exception_unwind_sniffer
2918 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2920 struct arm_prologue_cache *cache;
2922 if (*this_cache == NULL)
2923 *this_cache = arm_make_prologue_cache (this_frame);
2924 cache = (struct arm_prologue_cache *) *this_cache;
2926 return cache->prev_sp - cache->framesize;
2929 struct frame_base arm_normal_base = {
2930 &arm_prologue_unwind,
2931 arm_normal_frame_base,
2932 arm_normal_frame_base,
2933 arm_normal_frame_base
2936 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2937 dummy frame. The frame ID's base needs to match the TOS value
2938 saved by save_dummy_frame_tos() and returned from
2939 arm_push_dummy_call, and the PC needs to match the dummy frame's breakpoint. */
2942 static struct frame_id
2943 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2945 return frame_id_build (get_frame_register_unsigned (this_frame,
2947 get_frame_pc (this_frame));
2950 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2951 be used to construct the previous frame's ID, after looking up the
2952 containing function). */
2955 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2958 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2959 return arm_addr_bits_remove (gdbarch, pc);
2963 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2965 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2968 static struct value *
2969 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2972 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2974 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2979 /* The PC is normally copied from the return column, which
2980 describes saves of LR. However, that version may have an
2981 extra bit set to indicate Thumb state. The bit is not part of the PC. */
2983 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2984 return frame_unwind_got_constant (this_frame, regnum,
2985 arm_addr_bits_remove (gdbarch, lr));
2988 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2989 cpsr = get_frame_register_unsigned (this_frame, regnum);
2990 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2991 if (IS_THUMB_ADDR (lr))
2995 return frame_unwind_got_constant (this_frame, regnum, cpsr);
2998 internal_error (__FILE__, __LINE__,
2999 _("Unexpected register %d"), regnum);
3004 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3005 struct dwarf2_frame_state_reg *reg,
3006 struct frame_info *this_frame)
3012 reg->how = DWARF2_FRAME_REG_FN;
3013 reg->loc.fn = arm_dwarf2_prev_register;
3016 reg->how = DWARF2_FRAME_REG_CFA;
3021 /* Implement the stack_frame_destroyed_p gdbarch method. */
3024 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3026 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3027 unsigned int insn, insn2;
3028 int found_return = 0, found_stack_adjust = 0;
3029 CORE_ADDR func_start, func_end;
3033 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3036 /* The epilogue is a sequence of instructions along the following lines:
3038 - add stack frame size to SP or FP
3039 - [if frame pointer used] restore SP from FP
3040 - restore registers from SP [may include PC]
3041 - a return-type instruction [if PC wasn't already restored]
3043 In a first pass, we scan forward from the current PC and check that the
3044 instructions we find are compatible with this sequence, ending in a return instruction.
3047 However, this is not sufficient to distinguish indirect function calls
3048 within a function from indirect tail calls in the epilogue in some cases.
3049 Therefore, if we didn't already find any SP-changing instruction during
3050 forward scan, we add a backward scanning heuristic to ensure we actually
3051 are in the epilogue. */
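/* For example, for an epilogue such as "add sp, #16; pop {r4, pc}", the
forward scan recognizes the 16-bit SP-restoring "add sp" and then the pop
whose register list includes PC (encoding 0xbdxx), which counts as the
return. */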
3054 while (scan_pc < func_end && !found_return)
3056 if (target_read_memory (scan_pc, buf, 2))
3060 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3062 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3064 else if (insn == 0x46f7) /* mov pc, lr */
3066 else if (thumb_instruction_restores_sp (insn))
3068 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3071 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3073 if (target_read_memory (scan_pc, buf, 2))
3077 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3079 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3081 if (insn2 & 0x8000) /* <registers> include PC. */
3084 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3085 && (insn2 & 0x0fff) == 0x0b04)
3087 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3090 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3091 && (insn2 & 0x0e00) == 0x0a00)
3103 /* Since any instruction in the epilogue sequence, with the possible
3104 exception of return itself, updates the stack pointer, we need to
3105 scan backwards for at most one instruction. Try either a 16-bit or
3106 a 32-bit instruction. This is just a heuristic, so we do not worry
3107 too much about false positives. */
3109 if (pc - 4 < func_start)
3111 if (target_read_memory (pc - 4, buf, 4))
3114 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3115 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3117 if (thumb_instruction_restores_sp (insn2))
3118 found_stack_adjust = 1;
3119 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3120 found_stack_adjust = 1;
3121 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3122 && (insn2 & 0x0fff) == 0x0b04)
3123 found_stack_adjust = 1;
3124 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3125 && (insn2 & 0x0e00) == 0x0a00)
3126 found_stack_adjust = 1;
3128 return found_stack_adjust;
3131 /* Implement the stack_frame_destroyed_p gdbarch method. */
3134 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3136 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3139 CORE_ADDR func_start, func_end;
3141 if (arm_pc_is_thumb (gdbarch, pc))
3142 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3144 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3147 /* We are in the epilogue if the previous instruction was a stack
3148 adjustment and the next instruction is a possible return (bx, mov
3149 pc, or pop). We could have to scan backwards to find the stack
3150 adjustment, or forwards to find the return, but this is a decent
3151 approximation. First scan forwards. */
3154 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3155 if (bits (insn, 28, 31) != INST_NV)
3157 if ((insn & 0x0ffffff0) == 0x012fff10)
3160 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3163 else if ((insn & 0x0fff0000) == 0x08bd0000
3164 && (insn & 0x0000c000) != 0)
3165 /* POP (LDMIA), including PC or LR. */
3172 /* Scan backwards. This is just a heuristic, so do not worry about
3173 false positives from mode changes. */
3175 if (pc < func_start + 4)
3178 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3179 if (arm_instruction_restores_sp (insn))
3186 /* When arguments must be pushed onto the stack, they go on in reverse
3187 order. The code below implements a FILO (stack) to do this. */
3192 struct stack_item *prev;
3196 static struct stack_item *
3197 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3199 struct stack_item *si;
3200 si = XNEW (struct stack_item);
3201 si->data = (gdb_byte *) xmalloc (len);
3204 memcpy (si->data, contents, len);
3208 static struct stack_item *
3209 pop_stack_item (struct stack_item *si)
3211 struct stack_item *dead = si;
3219 /* Return the alignment (in bytes) of the given type. */
3222 arm_type_align (struct type *t)
3228 t = check_typedef (t);
3229 switch (TYPE_CODE (t))
3232 /* Should never happen. */
3233 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3237 case TYPE_CODE_ENUM:
3241 case TYPE_CODE_RANGE:
3243 case TYPE_CODE_CHAR:
3244 case TYPE_CODE_BOOL:
3245 return TYPE_LENGTH (t);
3247 case TYPE_CODE_ARRAY:
3248 if (TYPE_VECTOR (t))
3250 /* Use the natural alignment for vector types (the same as for a
3251 scalar type), but the maximum alignment is 64 bits. */
3252 if (TYPE_LENGTH (t) > 8)
3255 return TYPE_LENGTH (t);
3258 return arm_type_align (TYPE_TARGET_TYPE (t));
3259 case TYPE_CODE_COMPLEX:
3260 return arm_type_align (TYPE_TARGET_TYPE (t));
3262 case TYPE_CODE_STRUCT:
3263 case TYPE_CODE_UNION:
3265 for (n = 0; n < TYPE_NFIELDS (t); n++)
3267 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3275 /* Possible base types for a candidate for passing and returning in VFP registers. */
3278 enum arm_vfp_cprc_base_type
3287 /* The length of one element of base type B. */
3290 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3294 case VFP_CPRC_SINGLE:
3296 case VFP_CPRC_DOUBLE:
3298 case VFP_CPRC_VEC64:
3300 case VFP_CPRC_VEC128:
3303 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3308 /* The character ('s', 'd' or 'q') for the type of VFP register used
3309 for passing base type B. */
3312 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3316 case VFP_CPRC_SINGLE:
3318 case VFP_CPRC_DOUBLE:
3320 case VFP_CPRC_VEC64:
3322 case VFP_CPRC_VEC128:
3325 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3330 /* Determine whether T may be part of a candidate for passing and
3331 returning in VFP registers, ignoring the limit on the total number
3332 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3333 classification of the first valid component found; if it is not
3334 VFP_CPRC_UNKNOWN, all components must have the same classification
3335 as *BASE_TYPE. If it is found that T contains a type not permitted
3336 for passing and returning in VFP registers, a type differently
3337 classified from *BASE_TYPE, or two types differently classified
3338 from each other, return -1, otherwise return the total number of
3339 base-type elements found (possibly 0 in an empty structure or
3340 array). Vector types are not currently supported, matching the
3341 generic AAPCS support. */
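/* For example, "struct { double x; double y; }" yields base type
VFP_CPRC_DOUBLE with a count of 2 and so is a valid candidate, whereas
"struct { float f; int i; }" is rejected (-1) because the integer member is
not a permitted component type. */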
3344 arm_vfp_cprc_sub_candidate (struct type *t,
3345 enum arm_vfp_cprc_base_type *base_type)
3347 t = check_typedef (t);
3348 switch (TYPE_CODE (t))
3351 switch (TYPE_LENGTH (t))
3354 if (*base_type == VFP_CPRC_UNKNOWN)
3355 *base_type = VFP_CPRC_SINGLE;
3356 else if (*base_type != VFP_CPRC_SINGLE)
3361 if (*base_type == VFP_CPRC_UNKNOWN)
3362 *base_type = VFP_CPRC_DOUBLE;
3363 else if (*base_type != VFP_CPRC_DOUBLE)
3372 case TYPE_CODE_COMPLEX:
3373 /* Arguments of complex T where T is one of the types float or
3374 double get treated as if they are implemented as:
3383 switch (TYPE_LENGTH (t))
3386 if (*base_type == VFP_CPRC_UNKNOWN)
3387 *base_type = VFP_CPRC_SINGLE;
3388 else if (*base_type != VFP_CPRC_SINGLE)
3393 if (*base_type == VFP_CPRC_UNKNOWN)
3394 *base_type = VFP_CPRC_DOUBLE;
3395 else if (*base_type != VFP_CPRC_DOUBLE)
3404 case TYPE_CODE_ARRAY:
3406 if (TYPE_VECTOR (t))
3408 /* 64-bit and 128-bit containerized vector types are VFP CPRCs. */
3410 switch (TYPE_LENGTH (t))
3413 if (*base_type == VFP_CPRC_UNKNOWN)
3414 *base_type = VFP_CPRC_VEC64;
3417 if (*base_type == VFP_CPRC_UNKNOWN)
3418 *base_type = VFP_CPRC_VEC128;
3429 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3433 if (TYPE_LENGTH (t) == 0)
3435 gdb_assert (count == 0);
3438 else if (count == 0)
3440 unitlen = arm_vfp_cprc_unit_length (*base_type);
3441 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3442 return TYPE_LENGTH (t) / unitlen;
3447 case TYPE_CODE_STRUCT:
3452 for (i = 0; i < TYPE_NFIELDS (t); i++)
3454 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3456 if (sub_count == -1)
3460 if (TYPE_LENGTH (t) == 0)
3462 gdb_assert (count == 0);
3465 else if (count == 0)
3467 unitlen = arm_vfp_cprc_unit_length (*base_type);
3468 if (TYPE_LENGTH (t) != unitlen * count)
3473 case TYPE_CODE_UNION:
3478 for (i = 0; i < TYPE_NFIELDS (t); i++)
3480 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3482 if (sub_count == -1)
3484 count = (count > sub_count ? count : sub_count);
3486 if (TYPE_LENGTH (t) == 0)
3488 gdb_assert (count == 0);
3491 else if (count == 0)
3493 unitlen = arm_vfp_cprc_unit_length (*base_type);
3494 if (TYPE_LENGTH (t) != unitlen * count)
3506 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3507 if passed to or returned from a non-variadic function with the VFP
3508 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3509 *BASE_TYPE to the base type for T and *COUNT to the number of
3510 elements of that base type before returning. */
3513 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3516 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3517 int c = arm_vfp_cprc_sub_candidate (t, &b);
3518 if (c <= 0 || c > 4)
3525 /* Return 1 if the VFP ABI should be used for passing arguments to and
3526 returning values from a function of type FUNC_TYPE, 0 otherwise. */
3530 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3532 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3533 /* Variadic functions always use the base ABI. Assume that functions
3534 without debug info are not variadic. */
3535 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3537 /* The VFP ABI is only supported as a variant of AAPCS. */
3538 if (tdep->arm_abi != ARM_ABI_AAPCS)
3540 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3543 /* We currently only support passing parameters in integer registers, which
3544 conforms with GCC's default model, and VFP argument passing following
3545 the VFP variant of AAPCS. Several other variants exist and
3546 we should probably support some of them based on the selected ABI. */
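/* For example, under the base AAPCS a call such as f (int a, double d)
passes A in r0, skips r1, and passes D in the even/odd pair r2/r3, because
doubleword-aligned quantities must start in an even-numbered register;
under the VFP variant D would instead go in d0. */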
3549 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3550 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3551 struct value **args, CORE_ADDR sp, int struct_return,
3552 CORE_ADDR struct_addr)
3554 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3558 struct stack_item *si = NULL;
3561 unsigned vfp_regs_free = (1 << 16) - 1;
3563 /* Determine the type of this function and whether the VFP ABI
3565 ftype = check_typedef (value_type (function));
3566 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3567 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3568 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3570 /* Set the return address. For the ARM, the return breakpoint is
3571 always at BP_ADDR. */
3572 if (arm_pc_is_thumb (gdbarch, bp_addr))
3574 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3576 /* Walk through the list of args and determine how large a temporary
3577 stack is required. Need to take care here as structs may be
3578 passed on the stack, and we have to push them. */
3581 argreg = ARM_A1_REGNUM;
3584 /* The struct_return pointer occupies the first parameter
3585 passing register. */
3589 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3590 gdbarch_register_name (gdbarch, argreg),
3591 paddress (gdbarch, struct_addr));
3592 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3596 for (argnum = 0; argnum < nargs; argnum++)
3599 struct type *arg_type;
3600 struct type *target_type;
3601 enum type_code typecode;
3602 const bfd_byte *val;
3604 enum arm_vfp_cprc_base_type vfp_base_type;
3606 int may_use_core_reg = 1;
3608 arg_type = check_typedef (value_type (args[argnum]));
3609 len = TYPE_LENGTH (arg_type);
3610 target_type = TYPE_TARGET_TYPE (arg_type);
3611 typecode = TYPE_CODE (arg_type);
3612 val = value_contents (args[argnum]);
3614 align = arm_type_align (arg_type);
3615 /* Round alignment up to a whole number of words. */
3616 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3617 /* Different ABIs have different maximum alignments. */
3618 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3620 /* The APCS ABI only requires word alignment. */
3621 align = INT_REGISTER_SIZE;
3625 /* The AAPCS requires at most doubleword alignment. */
3626 if (align > INT_REGISTER_SIZE * 2)
3627 align = INT_REGISTER_SIZE * 2;
3631 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3639 /* Because this is a CPRC it cannot go in a core register or
3640 cause a core register to be skipped for alignment.
3641 Either it goes in VFP registers and the rest of this loop
3642 iteration is skipped for this argument, or it goes on the
3643 stack (and the stack alignment code is correct for this
3645 may_use_core_reg = 0;
3647 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3648 shift = unit_length / 4;
3649 mask = (1 << (shift * vfp_base_count)) - 1;
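/* For instance, a CPRC made up of two doubles has a unit length of 8, so
SHIFT is 2 and MASK is 0xf: the search below looks for four consecutive
free single-precision slots starting at an even index, i.e. an aligned pair
of D registers. Searching from slot 0 each time also implements the AAPCS
back-filling rule, since lower slots that were skipped remain available. */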
3650 for (regno = 0; regno < 16; regno += shift)
3651 if (((vfp_regs_free >> regno) & mask) == mask)
3660 vfp_regs_free &= ~(mask << regno);
3661 reg_scaled = regno / shift;
3662 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3663 for (i = 0; i < vfp_base_count; i++)
3667 if (reg_char == 'q')
3668 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3669 val + i * unit_length);
3672 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3673 reg_char, reg_scaled + i);
3674 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3676 regcache_cooked_write (regcache, regnum,
3677 val + i * unit_length);
3684 /* This CPRC could not go in VFP registers, so all VFP
3685 registers are now marked as used. */
3690 /* Push stack padding for doubleword alignment. */
3691 if (nstack & (align - 1))
3693 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3694 nstack += INT_REGISTER_SIZE;
3697 /* Doubleword aligned quantities must go in even register pairs. */
3698 if (may_use_core_reg
3699 && argreg <= ARM_LAST_ARG_REGNUM
3700 && align > INT_REGISTER_SIZE
3704 /* If the argument is a pointer to a function, and it is a
3705 Thumb function, create a LOCAL copy of the value and set
3706 the THUMB bit in it. */
3707 if (TYPE_CODE_PTR == typecode
3708 && target_type != NULL
3709 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3711 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3712 if (arm_pc_is_thumb (gdbarch, regval))
3714 bfd_byte *copy = (bfd_byte *) alloca (len);
3715 store_unsigned_integer (copy, len, byte_order,
3716 MAKE_THUMB_ADDR (regval));
3721 /* Copy the argument to general registers or the stack in
3722 register-sized pieces. Large arguments are split between
3723 registers and stack. */
3726 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3728 = extract_unsigned_integer (val, partial_len, byte_order);
3730 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3732 /* The argument is being passed in a general purpose register. */
3734 if (byte_order == BFD_ENDIAN_BIG)
3735 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3737 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3739 gdbarch_register_name
3741 phex (regval, INT_REGISTER_SIZE));
3742 regcache_cooked_write_unsigned (regcache, argreg, regval);
3747 gdb_byte buf[INT_REGISTER_SIZE];
3749 memset (buf, 0, sizeof (buf));
3750 store_unsigned_integer (buf, partial_len, byte_order, regval);
3752 /* Push the arguments onto the stack. */
3754 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3756 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3757 nstack += INT_REGISTER_SIZE;
3764 /* If we have an odd number of words to push, then decrement the stack
3765 by one word now, so that the first stack argument will be doubleword aligned. */
3772 write_memory (sp, si->data, si->len);
3773 si = pop_stack_item (si);
3776 /* Finally, update the SP register. */
3777 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3783 /* Always align the frame to an 8-byte boundary. This is required on
3784 some platforms and harmless on the rest. */
3787 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3789 /* Align the stack to eight bytes. */
3790 return sp & ~ (CORE_ADDR) 7;
3794 print_fpu_flags (struct ui_file *file, int flags)
3796 if (flags & (1 << 0))
3797 fputs_filtered ("IVO ", file);
3798 if (flags & (1 << 1))
3799 fputs_filtered ("DVZ ", file);
3800 if (flags & (1 << 2))
3801 fputs_filtered ("OFL ", file);
3802 if (flags & (1 << 3))
3803 fputs_filtered ("UFL ", file);
3804 if (flags & (1 << 4))
3805 fputs_filtered ("INX ", file);
3806 fputc_filtered ('\n', file);
3809 /* Print interesting information about the floating point processor
3810 (if present) or emulator. */
3812 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3813 struct frame_info *frame, const char *args)
3815 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3818 type = (status >> 24) & 127;
3819 if (status & (1 << 31))
3820 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3822 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3823 /* i18n: [floating point unit] mask */
3824 fputs_filtered (_("mask: "), file);
3825 print_fpu_flags (file, status >> 16);
3826 /* i18n: [floating point unit] flags */
3827 fputs_filtered (_("flags: "), file);
3828 print_fpu_flags (file, status);
3831 /* Construct the ARM extended floating point type. */
3832 static struct type *
3833 arm_ext_type (struct gdbarch *gdbarch)
3835 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3837 if (!tdep->arm_ext_type)
3839 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3840 floatformats_arm_ext);
3842 return tdep->arm_ext_type;
3845 static struct type *
3846 arm_neon_double_type (struct gdbarch *gdbarch)
3848 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3850 if (tdep->neon_double_type == NULL)
3852 struct type *t, *elem;
3854 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3856 elem = builtin_type (gdbarch)->builtin_uint8;
3857 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3858 elem = builtin_type (gdbarch)->builtin_uint16;
3859 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3860 elem = builtin_type (gdbarch)->builtin_uint32;
3861 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3862 elem = builtin_type (gdbarch)->builtin_uint64;
3863 append_composite_type_field (t, "u64", elem);
3864 elem = builtin_type (gdbarch)->builtin_float;
3865 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3866 elem = builtin_type (gdbarch)->builtin_double;
3867 append_composite_type_field (t, "f64", elem);
3869 TYPE_VECTOR (t) = 1;
3870 TYPE_NAME (t) = "neon_d";
3871 tdep->neon_double_type = t;
3874 return tdep->neon_double_type;
3877 /* FIXME: The vector types are not correctly ordered on big-endian
3878 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3879 bits of d0 - regardless of what unit size is being held in d0. So
3880 the offset of the first uint8 in d0 is 7, but the offset of the
3881 first float is 4. This code works as-is for little-endian targets. */
3884 static struct type *
3885 arm_neon_quad_type (struct gdbarch *gdbarch)
3887 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3889 if (tdep->neon_quad_type == NULL)
3891 struct type *t, *elem;
3893 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3895 elem = builtin_type (gdbarch)->builtin_uint8;
3896 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3897 elem = builtin_type (gdbarch)->builtin_uint16;
3898 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3899 elem = builtin_type (gdbarch)->builtin_uint32;
3900 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3901 elem = builtin_type (gdbarch)->builtin_uint64;
3902 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3903 elem = builtin_type (gdbarch)->builtin_float;
3904 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3905 elem = builtin_type (gdbarch)->builtin_double;
3906 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3908 TYPE_VECTOR (t) = 1;
3909 TYPE_NAME (t) = "neon_q";
3910 tdep->neon_quad_type = t;
3913 return tdep->neon_quad_type;
3916 /* Return the GDB type object for the "standard" data type of data in register REGNUM. */
3919 static struct type *
3920 arm_register_type (struct gdbarch *gdbarch, int regnum)
3922 int num_regs = gdbarch_num_regs (gdbarch);
3924 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3925 && regnum >= num_regs && regnum < num_regs + 32)
3926 return builtin_type (gdbarch)->builtin_float;
3928 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3929 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3930 return arm_neon_quad_type (gdbarch);
3932 /* If the target description has register information, we are only
3933 in this function so that we can override the types of
3934 double-precision registers for NEON. */
3935 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3937 struct type *t = tdesc_register_type (gdbarch, regnum);
3939 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3940 && TYPE_CODE (t) == TYPE_CODE_FLT
3941 && gdbarch_tdep (gdbarch)->have_neon)
3942 return arm_neon_double_type (gdbarch);
3947 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3949 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3950 return builtin_type (gdbarch)->builtin_void;
3952 return arm_ext_type (gdbarch);
3954 else if (regnum == ARM_SP_REGNUM)
3955 return builtin_type (gdbarch)->builtin_data_ptr;
3956 else if (regnum == ARM_PC_REGNUM)
3957 return builtin_type (gdbarch)->builtin_func_ptr;
3958 else if (regnum >= ARRAY_SIZE (arm_register_names))
3959 /* These registers are only supported on targets which supply
3960 an XML description. */
3961 return builtin_type (gdbarch)->builtin_int0;
3963 return builtin_type (gdbarch)->builtin_uint32;
3966 /* Map a DWARF register REGNUM onto the appropriate GDB register number. */
3970 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3972 /* Core integer regs. */
3973 if (reg >= 0 && reg <= 15)
3976 /* Legacy FPA encoding. These were once used in a way which
3977 overlapped with VFP register numbering, so their use is
3978 discouraged, but GDB doesn't support the ARM toolchain
3979 which used them for VFP. */
3980 if (reg >= 16 && reg <= 23)
3981 return ARM_F0_REGNUM + reg - 16;
3983 /* New assignments for the FPA registers. */
3984 if (reg >= 96 && reg <= 103)
3985 return ARM_F0_REGNUM + reg - 96;
3987 /* WMMX register assignments. */
3988 if (reg >= 104 && reg <= 111)
3989 return ARM_WCGR0_REGNUM + reg - 104;
3991 if (reg >= 112 && reg <= 127)
3992 return ARM_WR0_REGNUM + reg - 112;
3994 if (reg >= 192 && reg <= 199)
3995 return ARM_WC0_REGNUM + reg - 192;
3997 /* VFP v2 registers. A double precision value is actually
3998 in d1 rather than s2, but the ABI only defines numbering
3999 for the single precision registers. This will "just work"
4000 in GDB for little endian targets (we'll read eight bytes,
4001 starting in s0 and then progressing to s1), but will be
4002 reversed on big endian targets with VFP. This won't
4003 be a problem for the new Neon quad registers; you're supposed
4004 to use DW_OP_piece for those. */
4005 if (reg >= 64 && reg <= 95)
4009 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4010 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4014 /* VFP v3 / Neon registers. This range is also used for VFP v2
4015 registers, except that it now describes d0 instead of s0. */
4016 if (reg >= 256 && reg <= 287)
4020 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4021 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4028 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4030 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4033 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4035 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4036 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4038 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4039 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4041 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4042 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4044 if (reg < NUM_GREGS)
4045 return SIM_ARM_R0_REGNUM + reg;
4048 if (reg < NUM_FREGS)
4049 return SIM_ARM_FP0_REGNUM + reg;
4052 if (reg < NUM_SREGS)
4053 return SIM_ARM_FPS_REGNUM + reg;
4056 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4059 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4060 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4061 It is thought that this is the floating-point register format on
4062 little-endian systems. */
4065 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4066 void *dbl, int endianess)
4070 if (endianess == BFD_ENDIAN_BIG)
4071 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4073 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4075 floatformat_from_doublest (fmt, &d, dbl);
4079 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4084 floatformat_to_doublest (fmt, ptr, &d);
4085 if (endianess == BFD_ENDIAN_BIG)
4086 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4088 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4092 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4093 of the appropriate mode (as encoded in the PC value), even if this
4094 differs from what would be expected according to the symbol tables. */
4097 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4098 struct address_space *aspace,
4101 struct cleanup *old_chain
4102 = make_cleanup_restore_integer (&arm_override_mode);
4104 arm_override_mode = IS_THUMB_ADDR (pc);
4105 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4107 insert_single_step_breakpoint (gdbarch, aspace, pc);
4109 do_cleanups (old_chain);
4112 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4113 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4114 NULL if an error occurs. BUF is freed. */
4117 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4118 int old_len, int new_len)
4121 int bytes_to_read = new_len - old_len;
4123 new_buf = (gdb_byte *) xmalloc (new_len);
4124 memcpy (new_buf + bytes_to_read, buf, old_len);
4126 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4134 /* An IT block is at most the 2-byte IT instruction followed by
4135 four 4-byte instructions. The furthest back we must search to
4136 find an IT block that affects the current instruction is thus
4137 2 + 3 * 4 == 14 bytes. */
4138 #define MAX_IT_BLOCK_PREFIX 14
4140 /* Use a quick scan if there are more than this many bytes of instructions. */
4142 #define IT_SCAN_THRESHOLD 32
4144 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4145 A breakpoint in an IT block may not be hit, depending on the condition flags. */
4148 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4152 CORE_ADDR boundary, func_start;
4154 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4155 int i, any, last_it, last_it_count;
4157 /* If we are using BKPT breakpoints, none of this is necessary. */
4158 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4161 /* ARM mode does not have this problem. */
4162 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4165 /* We are setting a breakpoint in Thumb code that could potentially
4166 contain an IT block. The first step is to find how much Thumb
4167 code there is; we do not need to read outside of known Thumb code. */
4169 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4171 /* Thumb-2 code must have mapping symbols to have a chance. */
4174 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4176 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4177 && func_start > boundary)
4178 boundary = func_start;
4180 /* Search for a candidate IT instruction. We have to do some fancy
4181 footwork to distinguish a real IT instruction from the second
4182 half of a 32-bit instruction, but there is no need for that if
4183 there's no candidate. */
4184 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4186 /* No room for an IT instruction. */
4189 buf = (gdb_byte *) xmalloc (buf_len);
4190 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4193 for (i = 0; i < buf_len; i += 2)
4195 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4196 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4209 /* OK, the code bytes before this instruction contain at least one
4210 halfword which resembles an IT instruction. We know that it's
4211 Thumb code, but there are still two possibilities. Either the
4212 halfword really is an IT instruction, or it is the second half of
4213 a 32-bit Thumb instruction. The only way we can tell is to
4214 scan forwards from a known instruction boundary. */
4215 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4219 /* There's a lot of code before this instruction. Start with an
4220 optimistic search; it's easy to recognize halfwords that can
4221 not be the start of a 32-bit instruction, and use that to
4222 lock on to the instruction boundaries. */
4223 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4226 buf_len = IT_SCAN_THRESHOLD;
4229 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4231 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4232 if (thumb_insn_size (inst1) == 2)
4239 /* At this point, if DEFINITE, BUF[I] is the first place we
4240 are sure that we know the instruction boundaries, and it is far
4241 enough from BPADDR that we could not miss an IT instruction
4242 affecting BPADDR. If ! DEFINITE, give up - start from a known boundary. */
4246 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4250 buf_len = bpaddr - boundary;
4256 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4259 buf_len = bpaddr - boundary;
4263 /* Scan forwards. Find the last IT instruction before BPADDR. */
4268 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4270 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4275 else if (inst1 & 0x0002)
4277 else if (inst1 & 0x0004)
4282 i += thumb_insn_size (inst1);
4288 /* There wasn't really an IT instruction after all. */
4291 if (last_it_count < 1)
4292 /* It was too far away. */
4295 /* This really is a trouble spot. Move the breakpoint to the IT instruction. */
4297 return bpaddr - buf_len + last_it;
4300 /* ARM displaced stepping support.
4302 Generally ARM displaced stepping works as follows:
4304 1. When an instruction is to be single-stepped, it is first decoded by
4305 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
4306 Depending on the type of instruction, it is then copied to a scratch
4307 location, possibly in a modified form. The copy_* set of functions
4308 performs such modification, as necessary. A breakpoint is placed after
4309 the modified instruction in the scratch space to return control to GDB.
4310 Note in particular that instructions which modify the PC will no longer
4311 do so after modification.
4313 2. The instruction is single-stepped, by setting the PC to the scratch
4314 location address, and resuming. Control returns to GDB when the breakpoint is hit.
4317 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4318 function used for the current instruction. This function's job is to
4319 put the CPU/memory state back to what it would have been if the
4320 instruction had been executed unmodified in its original location. */
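/* A minimal sketch of how the three phases above fit together, assuming a
   hypothetical driver function; in reality the sequencing is performed by
   GDB's generic displaced-stepping machinery, so this is illustration only
   and is kept out of the build.  */
#if 0
static void
arm_displaced_step_sketch (struct gdbarch *gdbarch, struct regcache *regs,
                           CORE_ADDR from, CORE_ADDR scratch,
                           struct displaced_step_closure *dsc)
{
  /* Phase 1: decode the instruction at FROM and emit a (possibly modified)
     copy at SCRATCH, recording per-instruction state and a cleanup routine
     in DSC.  */
  arm_process_displaced_insn (gdbarch, from, scratch, regs, dsc);

  /* Phase 2: the core sets the PC to SCRATCH and resumes; the breakpoint
     placed after the copied instruction returns control to GDB.  */

  /* Phase 3: undo the effects of executing out of line, as if the original
     instruction had run at FROM.  */
  if (dsc->cleanup != NULL)
    dsc->cleanup (gdbarch, regs, dsc);
}
#endif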
4322 /* NOP instruction (mov r0, r0). */
4323 #define ARM_NOP 0xe1a00000
4324 #define THUMB_NOP 0x4600
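/* THUMB_NOP is likewise "mov r0, r0", here in the 16-bit hi-register MOV
   encoding (0x4600); a true architectural Thumb NOP is not needed, only an
   instruction with no effect.  */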
4326 /* Helper for register reads for displaced stepping. In particular, this
4327 returns the PC as it would be seen by the instruction at its original location. */
4331 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4335 CORE_ADDR from = dsc->insn_addr;
4337 if (regno == ARM_PC_REGNUM)
4339 /* Compute pipeline offset:
4340 - When executing an ARM instruction, PC reads as the address of the
4341 current instruction plus 8.
4342 - When executing a Thumb instruction, PC reads as the address of the
4343 current instruction plus 4. */
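      /* For example, an ARM instruction at 0x8000 reads the PC as 0x8008,
         while a Thumb instruction at 0x8000 reads it as 0x8004; apply that
         pipeline offset here.  */
      from += dsc->is_thumb ? 4 : 8;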
4350 if (debug_displaced)
4351 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4352 (unsigned long) from);
4353 return (ULONGEST) from;
4357 regcache_cooked_read_unsigned (regs, regno, &ret);
4358 if (debug_displaced)
4359 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4360 regno, (unsigned long) ret);
4366 displaced_in_arm_mode (struct regcache *regs)
4369 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4371 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4373 return (ps & t_bit) == 0;
4376 /* Write to the PC as from a branch instruction. */
4379 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4383 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4384 architecture versions < 6. */
4385 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4386 val & ~(ULONGEST) 0x3);
4388 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4389 val & ~(ULONGEST) 0x1);
4392 /* Write to the PC as from a branch-exchange instruction. */
4395 bx_write_pc (struct regcache *regs, ULONGEST val)
4398 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4400 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4404 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4405 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4407 else if ((val & 2) == 0)
4409 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4410 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4414 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4415 mode, align dest to 4 bytes). */
4416 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4417 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4418 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4422 /* Write to the PC as if from a load instruction. */
4425 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4428 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4429 bx_write_pc (regs, val);
4431 branch_write_pc (regs, dsc, val);
4434 /* Write to the PC as if from an ALU instruction. */
4437 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4440 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4441 bx_write_pc (regs, val);
4443 branch_write_pc (regs, dsc, val);
4446 /* Helper for writing to registers for displaced stepping. Writing to the PC
4447 has varying effects depending on the instruction which does the write:
4448 this is controlled by the WRITE_PC argument. */
4451 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4452 int regno, ULONGEST val, enum pc_write_style write_pc)
4454 if (regno == ARM_PC_REGNUM)
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4458 (unsigned long) val);
4461 case BRANCH_WRITE_PC:
4462 branch_write_pc (regs, dsc, val);
4466 bx_write_pc (regs, val);
4470 load_write_pc (regs, dsc, val);
4474 alu_write_pc (regs, dsc, val);
4477 case CANNOT_WRITE_PC:
4478 warning (_("Instruction wrote to PC in an unexpected way when "
4479 "single-stepping"));
4483 internal_error (__FILE__, __LINE__,
4484 _("Invalid argument to displaced_write_reg"));
4487 dsc->wrote_to_pc = 1;
4491 if (debug_displaced)
4492 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4493 regno, (unsigned long) val);
4494 regcache_cooked_write_unsigned (regs, regno, val);
4498 /* This function is used to concisely determine if an instruction INSN
4499 references PC. Register fields of interest in INSN should have the
4500 corresponding fields of BITMASK set to 0b1111. The function
4501 returns 1 if any of these fields in INSN reference the PC
4502 (also 0b1111, r15), else it returns 0. */
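/* For example, ARM data-processing instructions keep Rn in bits 16-19 and
   Rd in bits 12-15, so a caller interested in either field referencing the
   PC passes a bitmask of 0x000ff000ul, as arm_copy_alu_imm does below.  */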
4505 insn_references_pc (uint32_t insn, uint32_t bitmask)
4507 uint32_t lowbit = 1;
4509 while (bitmask != 0)
4513 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4519 mask = lowbit * 0xf;
4521 if ((insn & mask) == mask)
4530 /* The simplest copy function. Many instructions have the same effect no
4531 matter what address they are executed at: in those cases, use this. */
4534 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4535 const char *iname, struct displaced_step_closure *dsc)
4537 if (debug_displaced)
4538 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4539 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4542 dsc->modinsn[0] = insn;
4548 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4549 uint16_t insn2, const char *iname,
4550 struct displaced_step_closure *dsc)
4552 if (debug_displaced)
4553 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4554 "opcode/class '%s' unmodified\n", insn1, insn2,
4557 dsc->modinsn[0] = insn1;
4558 dsc->modinsn[1] = insn2;
4564 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any modification. */
4567 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
4569 struct displaced_step_closure *dsc)
4571 if (debug_displaced)
4572 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4573 "opcode/class '%s' unmodified\n", insn,
4576 dsc->modinsn[0] = insn;
4581 /* Preload instructions with immediate offset. */
4584 cleanup_preload (struct gdbarch *gdbarch,
4585 struct regcache *regs, struct displaced_step_closure *dsc)
4587 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4588 if (!dsc->u.preload.immed)
4589 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4593 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4594 struct displaced_step_closure *dsc, unsigned int rn)
4597 /* Preload instructions:
4599 {pli/pld} [rn, #+/-imm]
4601 {pli/pld} [r0, #+/-imm]. */
4603 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4604 rn_val = displaced_read_reg (regs, dsc, rn);
4605 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4606 dsc->u.preload.immed = 1;
4608 dsc->cleanup = &cleanup_preload;
4612 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4613 struct displaced_step_closure *dsc)
4615 unsigned int rn = bits (insn, 16, 19);
4617 if (!insn_references_pc (insn, 0x000f0000ul))
4618 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4620 if (debug_displaced)
4621 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4622 (unsigned long) insn);
4624 dsc->modinsn[0] = insn & 0xfff0ffff;
4626 install_preload (gdbarch, regs, dsc, rn);
4632 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4633 struct regcache *regs, struct displaced_step_closure *dsc)
4635 unsigned int rn = bits (insn1, 0, 3);
4636 unsigned int u_bit = bit (insn1, 7);
4637 int imm12 = bits (insn2, 0, 11);
4640 if (rn != ARM_PC_REGNUM)
4641 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4643 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding
4644 T3 and PLD (literal) Encoding T1. */
4645 if (debug_displaced)
4646 fprintf_unfiltered (gdb_stdlog,
4647 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4648 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4654 /* Rewrite instruction {pli/pld} PC imm12 into:
4655 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4659 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4661 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4662 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4664 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4666 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4667 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4668 dsc->u.preload.immed = 0;
4670 /* {pli/pld} [r0, r1] */
4671 dsc->modinsn[0] = insn1 & 0xfff0;
4672 dsc->modinsn[1] = 0xf001;
4675 dsc->cleanup = &cleanup_preload;
4679 /* Preload instructions with register offset. */
4682 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4683 struct displaced_step_closure *dsc, unsigned int rn,
4686 ULONGEST rn_val, rm_val;
4688 /* Preload register-offset instructions:
4690 {pli/pld} [rn, rm {, shift}]
4692 {pli/pld} [r0, r1 {, shift}]. */
4694 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4695 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4696 rn_val = displaced_read_reg (regs, dsc, rn);
4697 rm_val = displaced_read_reg (regs, dsc, rm);
4698 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4699 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4700 dsc->u.preload.immed = 0;
4702 dsc->cleanup = &cleanup_preload;
4706 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4707 struct regcache *regs,
4708 struct displaced_step_closure *dsc)
4710 unsigned int rn = bits (insn, 16, 19);
4711 unsigned int rm = bits (insn, 0, 3);
4714 if (!insn_references_pc (insn, 0x000f000ful))
4715 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4717 if (debug_displaced)
4718 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4719 (unsigned long) insn);
4721 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4723 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4727 /* Copy/cleanup coprocessor load and store instructions. */
4730 cleanup_copro_load_store (struct gdbarch *gdbarch,
4731 struct regcache *regs,
4732 struct displaced_step_closure *dsc)
4734 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4736 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4738 if (dsc->u.ldst.writeback)
4739 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4743 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4744 struct displaced_step_closure *dsc,
4745 int writeback, unsigned int rn)
4749 /* Coprocessor load/store instructions:
4751 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4753 {stc/stc2} [r0, #+/-imm].
4755 ldc/ldc2 are handled identically. */
4757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4758 rn_val = displaced_read_reg (regs, dsc, rn);
4759 /* PC should be 4-byte aligned. */
4760 rn_val = rn_val & 0xfffffffc;
4761 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4763 dsc->u.ldst.writeback = writeback;
4764 dsc->u.ldst.rn = rn;
4766 dsc->cleanup = &cleanup_copro_load_store;
4770 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4771 struct regcache *regs,
4772 struct displaced_step_closure *dsc)
4774 unsigned int rn = bits (insn, 16, 19);
4776 if (!insn_references_pc (insn, 0x000f0000ul))
4777 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4779 if (debug_displaced)
4780 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4781 "load/store insn %.8lx\n", (unsigned long) insn);
4783 dsc->modinsn[0] = insn & 0xfff0ffff;
4785 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4791 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4792 uint16_t insn2, struct regcache *regs,
4793 struct displaced_step_closure *dsc)
4795 unsigned int rn = bits (insn1, 0, 3);
4797 if (rn != ARM_PC_REGNUM)
4798 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4799 "copro load/store", dsc);
4801 if (debug_displaced)
4802 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4803 "load/store insn %.4x%.4x\n", insn1, insn2);
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = insn2;
4809 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4810 do not support writeback, so pass 0. */
4811 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4816 /* Clean up branch instructions (actually perform the branch, by setting the PC). */
4820 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4821 struct displaced_step_closure *dsc)
4823 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4824 int branch_taken = condition_true (dsc->u.branch.cond, status);
4825 enum pc_write_style write_pc = dsc->u.branch.exchange
4826 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4831 if (dsc->u.branch.link)
4833 /* LR should hold the address of the insn following the current one. In
4834 order not to confuse the logic handling a later `bx lr', bit 0 of the LR
4835 value should be set to 1 if the current insn is a Thumb insn. */
4836 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4839 next_insn_addr |= 0x1;
4841 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4845 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4848 /* Copy B/BL/BLX instructions with immediate destinations. */
4851 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4852 struct displaced_step_closure *dsc,
4853 unsigned int cond, int exchange, int link, long offset)
4855 /* Implement "BL<cond> <label>" as:
4857 Preparation: cond <- instruction condition
4858 Insn: mov r0, r0 (nop)
4859 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4861 B<cond> similar, but don't set r14 in cleanup. */
4863 dsc->u.branch.cond = cond;
4864 dsc->u.branch.link = link;
4865 dsc->u.branch.exchange = exchange;
4867 dsc->u.branch.dest = dsc->insn_addr;
4868 if (link && exchange)
4869 /* For BLX, offset is computed from the Align (PC, 4). */
4870 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4873 dsc->u.branch.dest += 4 + offset;
4875 dsc->u.branch.dest += 8 + offset;
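  /* For example, an ARM-mode "bl" at 0x8000 whose 24-bit immediate encodes
     a byte offset of 4 ends up with dest = 0x8000 + 8 + 4 = 0x800c, which
     matches the architectural semantics of a PC that reads as the
     instruction's address plus 8.  */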
4877 dsc->cleanup = &cleanup_branch;
4880 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs, struct displaced_step_closure *dsc)
4883 unsigned int cond = bits (insn, 28, 31);
4884 int exchange = (cond == 0xf);
4885 int link = exchange || bit (insn, 24);
4888 if (debug_displaced)
4889 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4890 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4891 (unsigned long) insn);
4893 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4894 then arrange the switch into Thumb mode. */
4895 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4897 offset = bits (insn, 0, 23) << 2;
4899 if (bit (offset, 25))
4900 offset = offset | ~0x3ffffff;
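  /* The offset is a signed 26-bit quantity (a 24-bit immediate shifted left
     by two), so bit 25 is its sign bit; the OR above extends that sign into
     the upper bits.  */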
4902 dsc->modinsn[0] = ARM_NOP;
4904 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4909 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4910 uint16_t insn2, struct regcache *regs,
4911 struct displaced_step_closure *dsc)
4913 int link = bit (insn2, 14);
4914 int exchange = link && !bit (insn2, 12);
4917 int j1 = bit (insn2, 13);
4918 int j2 = bit (insn2, 11);
4919 int s = sbits (insn1, 10, 10);
4920 int i1 = !(j1 ^ bit (insn1, 10));
4921 int i2 = !(j2 ^ bit (insn1, 10));
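  /* Per the ARM ARM, the T3/T4 branch encodings interleave the sign bit S
     (bit 10 of the first halfword) with J1 and J2: I1 = NOT(J1 EOR S) and
     I2 = NOT(J2 EOR S), which is what the two lines above compute.  */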
4923 if (!link && !exchange) /* B */
4925 offset = (bits (insn2, 0, 10) << 1);
4926 if (bit (insn2, 12)) /* Encoding T4 */
4928 offset |= (bits (insn1, 0, 9) << 12)
4934 else /* Encoding T3 */
4936 offset |= (bits (insn1, 0, 5) << 12)
4940 cond = bits (insn1, 6, 9);
4945 offset = (bits (insn1, 0, 9) << 12);
4946 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4947 offset |= exchange ?
4948 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4951 if (debug_displaced)
4952 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4953 "%.4x %.4x with offset %.8lx\n",
4954 link ? (exchange) ? "blx" : "bl" : "b",
4955 insn1, insn2, offset);
4957 dsc->modinsn[0] = THUMB_NOP;
4959 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4963 /* Copy B Thumb instructions. */
4965 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
4966 struct displaced_step_closure *dsc)
4968 unsigned int cond = 0;
4970 unsigned short bit_12_15 = bits (insn, 12, 15);
4971 CORE_ADDR from = dsc->insn_addr;
4973 if (bit_12_15 == 0xd)
4975 /* offset = SignExtend (imm8:0, 32) */
4976 offset = sbits ((insn << 1), 0, 8);
4977 cond = bits (insn, 8, 11);
4979 else if (bit_12_15 == 0xe) /* Encoding T2 */
4981 offset = sbits ((insn << 1), 0, 11);
4985 if (debug_displaced)
4986 fprintf_unfiltered (gdb_stdlog,
4987 "displaced: copying b immediate insn %.4x "
4988 "with offset %d\n", insn, offset);
4990 dsc->u.branch.cond = cond;
4991 dsc->u.branch.link = 0;
4992 dsc->u.branch.exchange = 0;
4993 dsc->u.branch.dest = from + 4 + offset;
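  /* For example, a conditional "b" at 0x8000 with imm8 = 0xfe yields
     offset = -4, so the destination is 0x8000 + 4 - 4 = 0x8000: a branch
     to itself.  */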
4995 dsc->modinsn[0] = THUMB_NOP;
4997 dsc->cleanup = &cleanup_branch;
5002 /* Copy BX/BLX with register-specified destinations. */
5005 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5006 struct displaced_step_closure *dsc, int link,
5007 unsigned int cond, unsigned int rm)
5009 /* Implement {BX,BLX}<cond> <reg>" as:
5011 Preparation: cond <- instruction condition
5012 Insn: mov r0, r0 (nop)
5013 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5015 Don't set r14 in cleanup for BX. */
5017 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5019 dsc->u.branch.cond = cond;
5020 dsc->u.branch.link = link;
5022 dsc->u.branch.exchange = 1;
5024 dsc->cleanup = &cleanup_branch;
5028 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5029 struct regcache *regs, struct displaced_step_closure *dsc)
5031 unsigned int cond = bits (insn, 28, 31);
5034 int link = bit (insn, 5);
5035 unsigned int rm = bits (insn, 0, 3);
5037 if (debug_displaced)
5038 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5039 (unsigned long) insn);
5041 dsc->modinsn[0] = ARM_NOP;
5043 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5048 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5049 struct regcache *regs,
5050 struct displaced_step_closure *dsc)
5052 int link = bit (insn, 7);
5053 unsigned int rm = bits (insn, 3, 6);
5055 if (debug_displaced)
5056 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5057 (unsigned short) insn);
5059 dsc->modinsn[0] = THUMB_NOP;
5061 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5067 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5070 cleanup_alu_imm (struct gdbarch *gdbarch,
5071 struct regcache *regs, struct displaced_step_closure *dsc)
5073 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5074 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5076 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5080 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5081 struct displaced_step_closure *dsc)
5083 unsigned int rn = bits (insn, 16, 19);
5084 unsigned int rd = bits (insn, 12, 15);
5085 unsigned int op = bits (insn, 21, 24);
5086 int is_mov = (op == 0xd);
5087 ULONGEST rd_val, rn_val;
5089 if (!insn_references_pc (insn, 0x000ff000ul))
5090 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5092 if (debug_displaced)
5093 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5094 "%.8lx\n", is_mov ? "move" : "ALU",
5095 (unsigned long) insn);
5097 /* Instruction is of form:
5099 <op><cond> rd, [rn,] #imm
5103 Preparation: tmp1, tmp2 <- r0, r1;
5105 Insn: <op><cond> r0, r1, #imm
5106 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2. */
5109 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5110 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5111 rn_val = displaced_read_reg (regs, dsc, rn);
5112 rd_val = displaced_read_reg (regs, dsc, rd);
5113 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5114 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5118 dsc->modinsn[0] = insn & 0xfff00fff;
5120 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
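  /* Masking with 0xfff00fff clears both the Rn field (bits 16-19) and the
     Rd field (bits 12-15), so the copied instruction targets r0; OR-ing in
     0x10000 sets Rn to r1 for the non-MOV forms, matching the recipe
     above.  */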
5122 dsc->cleanup = &cleanup_alu_imm;
5128 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5129 uint16_t insn2, struct regcache *regs,
5130 struct displaced_step_closure *dsc)
5132 unsigned int op = bits (insn1, 5, 8);
5133 unsigned int rn, rm, rd;
5134 ULONGEST rd_val, rn_val;
5136 rn = bits (insn1, 0, 3); /* Rn */
5137 rm = bits (insn2, 0, 3); /* Rm */
5138 rd = bits (insn2, 8, 11); /* Rd */
5140 /* This routine is only called for instruction MOV. */
5141 gdb_assert (op == 0x2 && rn == 0xf);
5143 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5144 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5146 if (debug_displaced)
5147 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5148 "ALU", insn1, insn2);
5150 /* Instruction is of form:
5152 <op><cond> rd, [rn,] #imm
5156 Preparation: tmp1, tmp2 <- r0, r1;
5158 Insn: <op><cond> r0, r1, #imm
5159 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2. */
5162 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5163 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5164 rn_val = displaced_read_reg (regs, dsc, rn);
5165 rd_val = displaced_read_reg (regs, dsc, rd);
5166 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5167 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5170 dsc->modinsn[0] = insn1;
5171 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5174 dsc->cleanup = &cleanup_alu_imm;
5179 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5182 cleanup_alu_reg (struct gdbarch *gdbarch,
5183 struct regcache *regs, struct displaced_step_closure *dsc)
5188 rd_val = displaced_read_reg (regs, dsc, 0);
5190 for (i = 0; i < 3; i++)
5191 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5193 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5197 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5198 struct displaced_step_closure *dsc,
5199 unsigned int rd, unsigned int rn, unsigned int rm)
5201 ULONGEST rd_val, rn_val, rm_val;
5203 /* Instruction is of form:
5205 <op><cond> rd, [rn,] rm [, <shift>]
5209 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5210 r0, r1, r2 <- rd, rn, rm
5211 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5212 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3. */
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
5219 rn_val = displaced_read_reg (regs, dsc, rn);
5220 rm_val = displaced_read_reg (regs, dsc, rm);
5221 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5223 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5226 dsc->cleanup = &cleanup_alu_reg;
5230 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5231 struct displaced_step_closure *dsc)
5233 unsigned int op = bits (insn, 21, 24);
5234 int is_mov = (op == 0xd);
5236 if (!insn_references_pc (insn, 0x000ff00ful))
5237 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5239 if (debug_displaced)
5240 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5241 is_mov ? "move" : "ALU", (unsigned long) insn);
5244 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5246 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5248 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5254 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5255 struct regcache *regs,
5256 struct displaced_step_closure *dsc)
5260 rm = bits (insn, 3, 6);
5261 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5263 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5264 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5266 if (debug_displaced)
5267 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5268 (unsigned short) insn);
5270 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5272 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5277 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5280 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5281 struct regcache *regs,
5282 struct displaced_step_closure *dsc)
5284 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5287 for (i = 0; i < 4; i++)
5288 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5290 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5294 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5295 struct displaced_step_closure *dsc,
5296 unsigned int rd, unsigned int rn, unsigned int rm,
5300 ULONGEST rd_val, rn_val, rm_val, rs_val;
5302 /* Instruction is of form:
5304 <op><cond> rd, [rn,] rm, <shift> rs
5308 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5309 r0, r1, r2, r3 <- rd, rn, rm, rs
5310 Insn: <op><cond> r0, r1, r2, <shift> r3
5312 Cleanup: rd <- r0; r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4. */
5316 for (i = 0; i < 4; i++)
5317 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5319 rd_val = displaced_read_reg (regs, dsc, rd);
5320 rn_val = displaced_read_reg (regs, dsc, rn);
5321 rm_val = displaced_read_reg (regs, dsc, rm);
5322 rs_val = displaced_read_reg (regs, dsc, rs);
5323 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5326 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5328 dsc->cleanup = &cleanup_alu_shifted_reg;
5332 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5333 struct regcache *regs,
5334 struct displaced_step_closure *dsc)
5336 unsigned int op = bits (insn, 21, 24);
5337 int is_mov = (op == 0xd);
5338 unsigned int rd, rn, rm, rs;
5340 if (!insn_references_pc (insn, 0x000fff0ful))
5341 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5343 if (debug_displaced)
5344 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5345 "%.8lx\n", is_mov ? "move" : "ALU",
5346 (unsigned long) insn);
5348 rn = bits (insn, 16, 19);
5349 rm = bits (insn, 0, 3);
5350 rs = bits (insn, 8, 11);
5351 rd = bits (insn, 12, 15);
5354 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5356 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5358 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5363 /* Clean up load instructions. */
5366 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5367 struct displaced_step_closure *dsc)
5369 ULONGEST rt_val, rt_val2 = 0, rn_val;
5371 rt_val = displaced_read_reg (regs, dsc, 0);
5372 if (dsc->u.ldst.xfersize == 8)
5373 rt_val2 = displaced_read_reg (regs, dsc, 1);
5374 rn_val = displaced_read_reg (regs, dsc, 2);
5376 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5377 if (dsc->u.ldst.xfersize > 4)
5378 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5379 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5380 if (!dsc->u.ldst.immed)
5381 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5383 /* Handle register writeback. */
5384 if (dsc->u.ldst.writeback)
5385 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5386 /* Put result in right place. */
5387 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5388 if (dsc->u.ldst.xfersize == 8)
5389 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5392 /* Clean up store instructions. */
5395 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5396 struct displaced_step_closure *dsc)
5398 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5400 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5401 if (dsc->u.ldst.xfersize > 4)
5402 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5403 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5404 if (!dsc->u.ldst.immed)
5405 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5406 if (!dsc->u.ldst.restore_r4)
5407 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5410 if (dsc->u.ldst.writeback)
5411 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5414 /* Copy "extra" load/store instructions. These are halfword/doubleword
5415 transfers, which have a different encoding to byte/word transfers. */
5418 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5419 struct regcache *regs, struct displaced_step_closure *dsc)
5421 unsigned int op1 = bits (insn, 20, 24);
5422 unsigned int op2 = bits (insn, 5, 6);
5423 unsigned int rt = bits (insn, 12, 15);
5424 unsigned int rn = bits (insn, 16, 19);
5425 unsigned int rm = bits (insn, 0, 3);
5426 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5427 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5428 int immed = (op1 & 0x4) != 0;
5430 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5432 if (!insn_references_pc (insn, 0x000ff00ful))
5433 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5435 if (debug_displaced)
5436 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5437 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
5438 (unsigned long) insn);
5440 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5443 internal_error (__FILE__, __LINE__,
5444 _("copy_extra_ld_st: instruction decode error"));
5446 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5447 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5448 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5450 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5452 rt_val = displaced_read_reg (regs, dsc, rt);
5453 if (bytesize[opcode] == 8)
5454 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5455 rn_val = displaced_read_reg (regs, dsc, rn);
5457 rm_val = displaced_read_reg (regs, dsc, rm);
5459 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5460 if (bytesize[opcode] == 8)
5461 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5462 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5464 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5467 dsc->u.ldst.xfersize = bytesize[opcode];
5468 dsc->u.ldst.rn = rn;
5469 dsc->u.ldst.immed = immed;
5470 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
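  /* Post-indexed addressing (P bit, bit 24, clear) always writes the new
     address back to Rn; pre-indexed addressing writes back only when the W
     bit (bit 21) is set.  */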
5471 dsc->u.ldst.restore_r4 = 0;
5474 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5476 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5477 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5479 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5481 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5482 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5484 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5489 /* Copy byte/half word/word loads and stores. */
5492 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5493 struct displaced_step_closure *dsc, int load,
5494 int immed, int writeback, int size, int usermode,
5495 int rt, int rm, int rn)
5497 ULONGEST rt_val, rn_val, rm_val = 0;
5499 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5500 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5502 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5504 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5506 rt_val = displaced_read_reg (regs, dsc, rt);
5507 rn_val = displaced_read_reg (regs, dsc, rn);
5509 rm_val = displaced_read_reg (regs, dsc, rm);
5511 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5512 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5514 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5516 dsc->u.ldst.xfersize = size;
5517 dsc->u.ldst.rn = rn;
5518 dsc->u.ldst.immed = immed;
5519 dsc->u.ldst.writeback = writeback;
5521 /* To write PC we can do:
5523 Before this sequence of instructions:
5524 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5525 r2 is the Rn value obtained from displaced_read_reg.
5527 Insn1: push {pc} Write address of STR instruction + offset on stack
5528 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5529 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5530 = addr(Insn1) + offset - addr(Insn3) - 8
5532 Insn4: add r4, r4, #8 r4 = offset - 8
5533 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5535 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5537 Otherwise we don't know what value to write for PC, since the offset is
5538 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5539 of this can be found in Section "Saving from r15" in
5540 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5542 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5547 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5548 uint16_t insn2, struct regcache *regs,
5549 struct displaced_step_closure *dsc, int size)
5551 unsigned int u_bit = bit (insn1, 7);
5552 unsigned int rt = bits (insn2, 12, 15);
5553 int imm12 = bits (insn2, 0, 11);
5556 if (debug_displaced)
5557 fprintf_unfiltered (gdb_stdlog,
5558 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5559 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5565 /* Rewrite instruction LDR Rt imm12 into:
5567 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5571 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5574 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5575 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5576 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5578 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5580 pc_val = pc_val & 0xfffffffc;
5582 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5583 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5587 dsc->u.ldst.xfersize = size;
5588 dsc->u.ldst.immed = 0;
5589 dsc->u.ldst.writeback = 0;
5590 dsc->u.ldst.restore_r4 = 0;
5592 /* LDR R0, R2, R3 */
5593 dsc->modinsn[0] = 0xf852;
5594 dsc->modinsn[1] = 0x3;
5597 dsc->cleanup = &cleanup_load;
5603 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5604 uint16_t insn2, struct regcache *regs,
5605 struct displaced_step_closure *dsc,
5606 int writeback, int immed)
5608 unsigned int rt = bits (insn2, 12, 15);
5609 unsigned int rn = bits (insn1, 0, 3);
5610 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5611 /* In LDR (register), there is also a register Rm, which is not allowed to
5612 be PC, so we don't have to check it. */
5614 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5615 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5618 if (debug_displaced)
5619 fprintf_unfiltered (gdb_stdlog,
5620 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5621 rt, rn, insn1, insn2);
5623 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5626 dsc->u.ldst.restore_r4 = 0;
5629 /* ldr[b]<cond> rt, [rn, #imm], etc.
5631 ldr[b]<cond> r0, [r2, #imm]. */
5633 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5634 dsc->modinsn[1] = insn2 & 0x0fff;
5637 /* ldr[b]<cond> rt, [rn, rm], etc.
5639 ldr[b]<cond> r0, [r2, r3]. */
5641 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5642 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5652 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5653 struct regcache *regs,
5654 struct displaced_step_closure *dsc,
5655 int load, int size, int usermode)
5657 int immed = !bit (insn, 25);
5658 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5659 unsigned int rt = bits (insn, 12, 15);
5660 unsigned int rn = bits (insn, 16, 19);
5661 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5663 if (!insn_references_pc (insn, 0x000ff00ful))
5664 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5669 load ? (size == 1 ? "ldrb" : "ldr")
5670 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5672 (unsigned long) insn);
5674 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5675 usermode, rt, rm, rn);
5677 if (load || rt != ARM_PC_REGNUM)
5679 dsc->u.ldst.restore_r4 = 0;
5682 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5684 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5685 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5687 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5689 {ldr,str}[b]<cond> r0, [r2, r3]. */
5690 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5694 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5695 dsc->u.ldst.restore_r4 = 1;
5696 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5697 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5698 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5699 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5700 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5704 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5706 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5711 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5716 /* Cleanup LDM instructions with fully-populated register list. This is an
5717 unfortunate corner case: it's impossible to implement correctly by modifying
5718 the instruction. The issue is as follows: we have an instruction,
5722 which we must rewrite to avoid loading PC. A possible solution would be to
5723 do the load in two halves, something like (with suitable cleanup
5727 ldm[id][ab] r8!, {r0-r7}
5729 ldm[id][ab] r8, {r7-r14}
5732 but at present there's no suitable place for <temp>, since the scratch space
5733 is overwritten before the cleanup routine is called. For now, we simply
5734 emulate the instruction. */
5737 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5738 struct displaced_step_closure *dsc)
5740 int inc = dsc->u.block.increment;
5741 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5742 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5743 uint32_t regmask = dsc->u.block.regmask;
5744 int regno = inc ? 0 : 15;
5745 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5746 int exception_return = dsc->u.block.load && dsc->u.block.user
5747 && (regmask & 0x8000) != 0;
5748 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5749 int do_transfer = condition_true (dsc->u.block.cond, status);
5750 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5755 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5756 sensible we can do here. Complain loudly. */
5757 if (exception_return)
5758 error (_("Cannot single-step exception return"));
5760 /* We don't handle any stores here for now. */
5761 gdb_assert (dsc->u.block.load != 0);
5763 if (debug_displaced)
5764 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5765 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5766 dsc->u.block.increment ? "inc" : "dec",
5767 dsc->u.block.before ? "before" : "after");
5774 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5777 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5780 xfer_addr += bump_before;
5782 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5783 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5785 xfer_addr += bump_after;
5787 regmask &= ~(1 << regno);
5790 if (dsc->u.block.writeback)
5791 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5795 /* Clean up an STM which included the PC in the register list. */
5798 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5799 struct displaced_step_closure *dsc)
5801 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5802 int store_executed = condition_true (dsc->u.block.cond, status);
5803 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5804 CORE_ADDR stm_insn_addr;
5807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5809 /* If condition code fails, there's nothing else to do. */
5810 if (!store_executed)
5813 if (dsc->u.block.increment)
5815 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5817 if (dsc->u.block.before)
5822 pc_stored_at = dsc->u.block.xfer_addr;
5824 if (dsc->u.block.before)
5828 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5829 stm_insn_addr = dsc->scratch_base;
5830 offset = pc_val - stm_insn_addr;
5832 if (debug_displaced)
5833 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5834 "STM instruction\n", offset);
5836 /* Rewrite the stored PC to the proper value for the non-displaced original instruction. */
5838 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5839 dsc->insn_addr + offset);
5842 /* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
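/* For example, for "ldm r0, {r4, pc}" the displaced copy loads into r0 and
   r1; this cleanup moves r1 into the PC and r0 into r4, and then restores
   r0 and r1 from the values saved before the copy ran.  */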
5848 cleanup_block_load_pc (struct gdbarch *gdbarch,
5849 struct regcache *regs,
5850 struct displaced_step_closure *dsc)
5852 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5853 int load_executed = condition_true (dsc->u.block.cond, status);
5854 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5855 unsigned int regs_loaded = bitcount (mask);
5856 unsigned int num_to_shuffle = regs_loaded, clobbered;
5858 /* The method employed here will fail if the register list is fully populated
5859 (we need to avoid loading PC directly). */
5860 gdb_assert (num_to_shuffle < 16);
5865 clobbered = (1 << num_to_shuffle) - 1;
5867 while (num_to_shuffle > 0)
5869 if ((mask & (1 << write_reg)) != 0)
5871 unsigned int read_reg = num_to_shuffle - 1;
5873 if (read_reg != write_reg)
5875 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5876 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5877 if (debug_displaced)
5878 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5879 "loaded register r%d to r%d\n"), read_reg,
5882 else if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5884 "r%d already in the right place\n"),
5887 clobbered &= ~(1 << write_reg);
5895 /* Restore any registers we scribbled over. */
5896 for (write_reg = 0; clobbered != 0; write_reg++)
5898 if ((clobbered & (1 << write_reg)) != 0)
5900 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5904 "clobbered register r%d\n"), write_reg);
5905 clobbered &= ~(1 << write_reg);
5909 /* Perform register writeback manually. */
5910 if (dsc->u.block.writeback)
5912 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5914 if (dsc->u.block.increment)
5915 new_rn_val += regs_loaded * 4;
5917 new_rn_val -= regs_loaded * 4;
5919 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5924 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5928 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5929 struct regcache *regs,
5930 struct displaced_step_closure *dsc)
5932 int load = bit (insn, 20);
5933 int user = bit (insn, 22);
5934 int increment = bit (insn, 23);
5935 int before = bit (insn, 24);
5936 int writeback = bit (insn, 21);
5937 int rn = bits (insn, 16, 19);
5939 /* Block transfers which don't mention PC can be run directly out-of-line. */
5941 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5942 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5944 if (rn == ARM_PC_REGNUM)
5946 warning (_("displaced: Unpredictable LDM or STM with "
5947 "base register r15"));
5948 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5951 if (debug_displaced)
5952 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5953 "%.8lx\n", (unsigned long) insn);
5955 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5956 dsc->u.block.rn = rn;
5958 dsc->u.block.load = load;
5959 dsc->u.block.user = user;
5960 dsc->u.block.increment = increment;
5961 dsc->u.block.before = before;
5962 dsc->u.block.writeback = writeback;
5963 dsc->u.block.cond = bits (insn, 28, 31);
5965 dsc->u.block.regmask = insn & 0xffff;
5969 if ((insn & 0xffff) == 0xffff)
5971 /* LDM with a fully-populated register list. This case is
5972 particularly tricky. Implement for now by fully emulating the
5973 instruction (which might not behave perfectly in all cases, but
5974 these instructions should be rare enough for that not to matter in practice). */
5976 dsc->modinsn[0] = ARM_NOP;
5978 dsc->cleanup = &cleanup_block_load_all;
5982 /* LDM of a list of registers which includes PC. Implement by
5983 rewriting the list of registers to be transferred into a
5984 contiguous chunk r0...rX before doing the transfer, then shuffling
5985 registers into the correct places in the cleanup routine. */
5986 unsigned int regmask = insn & 0xffff;
5987 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
5988 unsigned int to = 0, from = 0, i, new_rn;
5990 for (i = 0; i < num_in_list; i++)
5991 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5993 /* Writeback makes things complicated. We need to avoid clobbering
5994 the base register with one of the registers in our modified
5995 register list, but just using a different register can't work in
5998 ldm r14!, {r0-r13,pc}
6000 which would need to be rewritten as:
6004 but that can't work, because there's no free register for N.
6006 Solve this by turning off the writeback bit, and emulating
6007 writeback manually in the cleanup routine. */
6012 new_regmask = (1 << num_in_list) - 1;
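	  /* For example, "ldm r7, {r1, r5, pc}" names three registers, so
	     the out-of-line copy becomes "ldm r7, {r0, r1, r2}" and the
	     cleanup routine then moves the loaded values into r1, r5 and
	     the PC.  */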
6014 if (debug_displaced)
6015 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6016 "{..., pc}: original reg list %.4x, modified "
6017 "list %.4x\n"), rn, writeback ? "!" : "",
6018 (int) insn & 0xffff, new_regmask);
6020 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6022 dsc->cleanup = &cleanup_block_load_pc;
6027 /* STM of a list of registers which includes PC. Run the instruction
6028 as-is, but out of line: this will store the wrong value for the PC,
6029 so we must manually fix up the memory in the cleanup routine.
6030 Doing things this way has the advantage that we can auto-detect
6031 the offset of the PC write (which is architecture-dependent) in
6032 the cleanup routine. */
6033 dsc->modinsn[0] = insn;
6035 dsc->cleanup = &cleanup_block_store_pc;
6042 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6043 struct regcache *regs,
6044 struct displaced_step_closure *dsc)
6046 int rn = bits (insn1, 0, 3);
6047 int load = bit (insn1, 4);
6048 int writeback = bit (insn1, 5);
6050 /* Block transfers which don't mention PC can be run directly out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6053 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6055 if (rn == ARM_PC_REGNUM)
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6060 "unpredictable ldm/stm", dsc);
6063 if (debug_displaced)
6064 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6065 "%.4x%.4x\n", insn1, insn2);
6067 /* Clear bit 13, since it should always be zero. */
6068 dsc->u.block.regmask = (insn2 & 0xdfff);
6069 dsc->u.block.rn = rn;
6071 dsc->u.block.load = load;
6072 dsc->u.block.user = 0;
6073 dsc->u.block.increment = bit (insn1, 7);
6074 dsc->u.block.before = bit (insn1, 8);
6075 dsc->u.block.writeback = writeback;
6076 dsc->u.block.cond = INST_AL;
6077 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6081 if (dsc->u.block.regmask == 0xffff)
6083 /* This case cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
6088 unsigned int regmask = dsc->u.block.regmask;
6089 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6090 unsigned int to = 0, from = 0, i, new_rn;
6092 for (i = 0; i < num_in_list; i++)
6093 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6098 new_regmask = (1 << num_in_list) - 1;
6100 if (debug_displaced)
6101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6102 "{..., pc}: original reg list %.4x, modified "
6103 "list %.4x\n"), rn, writeback ? "!" : "",
6104 (int) dsc->u.block.regmask, new_regmask);
6106 dsc->modinsn[0] = insn1;
6107 dsc->modinsn[1] = (new_regmask & 0xffff);
6110 dsc->cleanup = &cleanup_block_load_pc;
6115 dsc->modinsn[0] = insn1;
6116 dsc->modinsn[1] = insn2;
6118 dsc->cleanup = &cleanup_block_store_pc;
6123 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6127 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6130 return read_memory_unsigned_integer (memaddr, len, byte_order);
6133 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6136 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6139 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6142 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6145 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self, CORE_ADDR pc)
6147 struct gdbarch_tdep *tdep;
6149 tdep = gdbarch_tdep (get_regcache_arch (self->regcache));
6150 if (tdep->syscall_next_pc != NULL)
6151 return tdep->syscall_next_pc (self->regcache);
6156 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6159 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6161 return arm_is_thumb (self->regcache);
6164 /* single_step() is called just before we want to resume the inferior,
6165 if we want to single-step it but there is no hardware or kernel
6166 single-step support. We find the possible targets of the coming
6167 instruction and set breakpoints on them. */
6170 arm_software_single_step (struct frame_info *frame)
6172 struct regcache *regcache = get_current_regcache ();
6173 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6174 struct address_space *aspace = get_regcache_aspace (regcache);
6175 struct arm_get_next_pcs next_pcs_ctx;
6178 VEC (CORE_ADDR) *next_pcs = NULL;
6179 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6181 arm_get_next_pcs_ctor (&next_pcs_ctx,
6182 &arm_get_next_pcs_ops,
6183 gdbarch_byte_order (gdbarch),
6184 gdbarch_byte_order_for_code (gdbarch),
6185 gdbarch_tdep (gdbarch)->thumb2_breakpoint,
6188 next_pcs = arm_get_next_pcs (&next_pcs_ctx, regcache_read_pc (regcache));
6190 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6191 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6193 do_cleanups (old_chain);
6198 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6199 for Linux, where some SVC instructions must be treated specially. */
6202 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6203 struct displaced_step_closure *dsc)
6205 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6207 if (debug_displaced)
6208 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6209 "%.8lx\n", (unsigned long) resume_addr);
6211 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6215 /* Common copy routine for the svc instruction. */
6218 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6219 struct displaced_step_closure *dsc)
6221 /* Preparation: none.
6222 Insn: unmodified svc.
6223 Cleanup: pc <- insn_addr + insn_size. */
6225 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next instruction. */
6227 dsc->wrote_to_pc = 1;
6229 /* Allow OS-specific code to override SVC handling. */
6230 if (dsc->u.svc.copy_svc_os)
6231 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6234 dsc->cleanup = &cleanup_svc;
6240 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6241 struct regcache *regs, struct displaced_step_closure *dsc)
6244 if (debug_displaced)
6245 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6246 (unsigned long) insn);
6248 dsc->modinsn[0] = insn;
6250 return install_svc (gdbarch, regs, dsc);
6254 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6255 struct regcache *regs, struct displaced_step_closure *dsc)
6258 if (debug_displaced)
6259 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6262 dsc->modinsn[0] = insn;
6264 return install_svc (gdbarch, regs, dsc);
6267 /* Copy undefined instructions. */
6270 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6271 struct displaced_step_closure *dsc)
6273 if (debug_displaced)
6274 fprintf_unfiltered (gdb_stdlog,
6275 "displaced: copying undefined insn %.8lx\n",
6276 (unsigned long) insn);
6278 dsc->modinsn[0] = insn;
6284 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6285 struct displaced_step_closure *dsc)
6288 if (debug_displaced)
6289 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6290 "%.4x %.4x\n", (unsigned short) insn1,
6291 (unsigned short) insn2);
6293 dsc->modinsn[0] = insn1;
6294 dsc->modinsn[1] = insn2;
6300 /* Copy unpredictable instructions. */
6303 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6304 struct displaced_step_closure *dsc)
6306 if (debug_displaced)
6307 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6308 "%.8lx\n", (unsigned long) insn);
6310 dsc->modinsn[0] = insn;
6315 /* The decode_* functions are instruction decoding helpers. They mostly follow
6316 the presentation in the ARM ARM. */
6319 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6320 struct regcache *regs,
6321 struct displaced_step_closure *dsc)
6323 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6324 unsigned int rn = bits (insn, 16, 19);
6326 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6327 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6328 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6329 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6330 else if ((op1 & 0x60) == 0x20)
6331 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6332 else if ((op1 & 0x71) == 0x40)
6333 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6335 else if ((op1 & 0x77) == 0x41)
6336 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6337 else if ((op1 & 0x77) == 0x45)
6338 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6339 else if ((op1 & 0x77) == 0x51)
6342 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6344 return arm_copy_unpred (gdbarch, insn, dsc);
6346 else if ((op1 & 0x77) == 0x55)
6347 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6348 else if (op1 == 0x57)
6351 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6352 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6353 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6354 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6355 default: return arm_copy_unpred (gdbarch, insn, dsc);
6357 else if ((op1 & 0x63) == 0x43)
6358 return arm_copy_unpred (gdbarch, insn, dsc);
6359 else if ((op2 & 0x1) == 0x0)
6360 switch (op1 & ~0x80)
6363 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6365 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6366 case 0x71: case 0x75:
6368 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6369 case 0x63: case 0x67: case 0x73: case 0x77:
6370 return arm_copy_unpred (gdbarch, insn, dsc);
6372 return arm_copy_undef (gdbarch, insn, dsc);
6375 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6379 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6380 struct regcache *regs,
6381 struct displaced_step_closure *dsc)
6383 if (bit (insn, 27) == 0)
6384 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6385 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6386 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6389 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6392 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6394 case 0x4: case 0x5: case 0x6: case 0x7:
6395 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6398 switch ((insn & 0xe00000) >> 21)
6400 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6402 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6405 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6408 return arm_copy_undef (gdbarch, insn, dsc);
6413 int rn_f = (bits (insn, 16, 19) == 0xf);
6414 switch ((insn & 0xe00000) >> 21)
6417 /* ldc/ldc2 imm (undefined for rn == pc). */
6418 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6419 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6422 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6424 case 0x4: case 0x5: case 0x6: case 0x7:
6425 /* ldc/ldc2 lit (undefined for rn != pc). */
6426 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6427 : arm_copy_undef (gdbarch, insn, dsc);
6430 return arm_copy_undef (gdbarch, insn, dsc);
6435 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6438 if (bits (insn, 16, 19) == 0xf)
6440 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6442 return arm_copy_undef (gdbarch, insn, dsc);
6446 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6448 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6452 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6454 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6457 return arm_copy_undef (gdbarch, insn, dsc);
6461 /* Decode miscellaneous instructions in dp/misc encoding space. */
6464 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6465 struct regcache *regs,
6466 struct displaced_step_closure *dsc)
6468 unsigned int op2 = bits (insn, 4, 6);
6469 unsigned int op = bits (insn, 21, 22);
6470 unsigned int op1 = bits (insn, 16, 19);
6475 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6478 if (op == 0x1) /* bx. */
6479 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6481 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6483 return arm_copy_undef (gdbarch, insn, dsc);
6487 /* Not really supported. */
6488 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6490 return arm_copy_undef (gdbarch, insn, dsc);
6494 return arm_copy_bx_blx_reg (gdbarch, insn,
6495 regs, dsc); /* blx register. */
6497 return arm_copy_undef (gdbarch, insn, dsc);
6500 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6504 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6506 /* Not really supported. */
6507 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6510 return arm_copy_undef (gdbarch, insn, dsc);
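/* Decode instructions in the data-processing and miscellaneous encoding space
   and dispatch each to the appropriate copy routine.  */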
6515 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6516 struct regcache *regs,
6517 struct displaced_step_closure *dsc)
6520 switch (bits (insn, 20, 24))
6523 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6526 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6528 case 0x12: case 0x16:
6529 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6532 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6536 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6538 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6539 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6540 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6541 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6542 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6543 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6544 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6545 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6546 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6547 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6548 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6549 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6550 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6551 /* 2nd arg means "unprivileged". */
6552 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6556 /* Should be unreachable. */
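/* Decode load/store word and unsigned byte instructions, dispatching each
   form to arm_copy_ldr_str_ldrb_strb with the appropriate flags.  */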
6561 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6562 struct regcache *regs,
6563 struct displaced_step_closure *dsc)
6565 int a = bit (insn, 25), b = bit (insn, 4);
6566 uint32_t op1 = bits (insn, 20, 24);
6567 int rn_f = bits (insn, 16, 19) == 0xf;
6569 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6570 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6571 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6572 else if ((!a && (op1 & 0x17) == 0x02)
6573 || (a && (op1 & 0x17) == 0x02 && !b))
6574 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6575 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6576 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6577 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6578 else if ((!a && (op1 & 0x17) == 0x03)
6579 || (a && (op1 & 0x17) == 0x03 && !b))
6580 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6581 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6582 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6583 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6584 else if ((!a && (op1 & 0x17) == 0x06)
6585 || (a && (op1 & 0x17) == 0x06 && !b))
6586 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6587 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6588 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6589 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6590 else if ((!a && (op1 & 0x17) == 0x07)
6591 || (a && (op1 & 0x17) == 0x07 && !b))
6592 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6594 /* Should be unreachable. */
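/* Decode media instructions (parallel add/subtract, pack/unpack, saturate,
   reverse, usad8/usada8 and the bit-field operations).  */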
6599 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6600 struct displaced_step_closure *dsc)
6602 switch (bits (insn, 20, 24))
6604 case 0x00: case 0x01: case 0x02: case 0x03:
6605 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6607 case 0x04: case 0x05: case 0x06: case 0x07:
6608 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6610 case 0x08: case 0x09: case 0x0a: case 0x0b:
6611 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6612 return arm_copy_unmodified (gdbarch, insn,
6613 "decode/pack/unpack/saturate/reverse", dsc);
6616 if (bits (insn, 5, 7) == 0) /* op2. */
6618 if (bits (insn, 12, 15) == 0xf)
6619 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6621 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6624 return arm_copy_undef (gdbarch, insn, dsc);
6626 case 0x1a: case 0x1b:
6627 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6628 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6630 return arm_copy_undef (gdbarch, insn, dsc);
6632 case 0x1c: case 0x1d:
6633 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6635 if (bits (insn, 0, 3) == 0xf)
6636 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6638 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6641 return arm_copy_undef (gdbarch, insn, dsc);
6643 case 0x1e: case 0x1f:
6644 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6645 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6647 return arm_copy_undef (gdbarch, insn, dsc);
6650 /* Should be unreachable. */
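/* Decode branch, branch-with-link and block transfer (LDM/STM) instructions.  */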
6655 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
6656 struct regcache *regs,
6657 struct displaced_step_closure *dsc)
6660 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6662 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
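/* Decode VFP/Neon extension register load/store instructions.  */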
6666 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6667 struct regcache *regs,
6668 struct displaced_step_closure *dsc)
6670 unsigned int opcode = bits (insn, 20, 24);
6674 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6675 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6677 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6678 case 0x12: case 0x16:
6679 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6681 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6682 case 0x13: case 0x17:
6683 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6685 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6686 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6687 /* Note: no writeback for these instructions. Bit 25 will always be
6688 zero though (via caller), so the following works OK. */
6689 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6692 /* Should be unreachable. */
6696 /* Decode shifted register instructions. */
6699 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6700 uint16_t insn2, struct regcache *regs,
6701 struct displaced_step_closure *dsc)
6703 /* PC is only allowed to be used in the MOV instruction. */
6705 unsigned int op = bits (insn1, 5, 8);
6706 unsigned int rn = bits (insn1, 0, 3);
6708 if (op == 0x2 && rn == 0xf) /* MOV */
6709 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6711 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6712 "dp (shift reg)", dsc);
6716 /* Decode extension register load/store. Exactly the same as
6717 arm_decode_ext_reg_ld_st. */
6720 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6721 uint16_t insn2, struct regcache *regs,
6722 struct displaced_step_closure *dsc)
6724 unsigned int opcode = bits (insn1, 4, 8);
6728 case 0x04: case 0x05:
6729 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6730 "vfp/neon vmov", dsc);
6732 case 0x08: case 0x0c: /* 01x00 */
6733 case 0x0a: case 0x0e: /* 01x10 */
6734 case 0x12: case 0x16: /* 10x10 */
6735 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6736 "vfp/neon vstm/vpush", dsc);
6738 case 0x09: case 0x0d: /* 01x01 */
6739 case 0x0b: case 0x0f: /* 01x11 */
6740 case 0x13: case 0x17: /* 10x11 */
6741 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6742 "vfp/neon vldm/vpop", dsc);
6744 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6745 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6747 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6748 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6751 /* Should be unreachable. */
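/* Decode the supervisor call and coprocessor instruction space, including the
   VFP/Neon instructions that share these encodings.  */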
6756 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6757 struct regcache *regs, struct displaced_step_closure *dsc)
6759 unsigned int op1 = bits (insn, 20, 25);
6760 int op = bit (insn, 4);
6761 unsigned int coproc = bits (insn, 8, 11);
6762 unsigned int rn = bits (insn, 16, 19);
6764 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6765 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6766 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6767 && (coproc & 0xe) != 0xa)
6769 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6770 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6771 && (coproc & 0xe) != 0xa)
6772 /* ldc/ldc2 imm/lit. */
6773 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6774 else if ((op1 & 0x3e) == 0x00)
6775 return arm_copy_undef (gdbarch, insn, dsc);
6776 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6777 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6778 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6779 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6780 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6781 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6782 else if ((op1 & 0x30) == 0x20 && !op)
6784 if ((coproc & 0xe) == 0xa)
6785 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6787 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6789 else if ((op1 & 0x30) == 0x20 && op)
6790 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6791 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6792 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6793 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6794 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6795 else if ((op1 & 0x30) == 0x30)
6796 return arm_copy_svc (gdbarch, insn, regs, dsc);
6798 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
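/* Decode the 32-bit Thumb coprocessor and Advanced SIMD instruction space.  */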
6802 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6803 uint16_t insn2, struct regcache *regs,
6804 struct displaced_step_closure *dsc)
6806 unsigned int coproc = bits (insn2, 8, 11);
6807 unsigned int op1 = bits (insn1, 4, 9);
6808 unsigned int bit_5_8 = bits (insn1, 5, 8);
6809 unsigned int bit_9 = bit (insn1, 9);
6810 unsigned int bit_4 = bit (insn1, 4);
6811 unsigned int rn = bits (insn1, 0, 3);
6816 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6817 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6819 else if (bit_5_8 == 0) /* UNDEFINED. */
6820 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6823 /* coproc is 101x: SIMD/VFP, extension registers load/store. */
6824 if ((coproc & 0xe) == 0xa)
6825 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6827 else /* coproc is not 101x. */
6829 if (bit_4 == 0) /* STC/STC2. */
6830 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6832 else /* LDC/LDC2 {literal, immediate}. */
6833 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6839 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
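/* Install the common displaced-stepping rewrite for a PC-relative address
   calculation (ADR): preload Rd with the original PC so that the copied
   instruction computes the correct address.  */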
6845 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6846 struct displaced_step_closure *dsc, int rd)
6852 Preparation: Rd <- PC
6858 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6859 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6863 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6864 struct displaced_step_closure *dsc,
6865 int rd, unsigned int imm)
6868 /* Encoding T2: ADDS Rd, #imm */
6869 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6871 install_pc_relative (gdbarch, regs, dsc, rd);
6877 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6878 struct regcache *regs,
6879 struct displaced_step_closure *dsc)
6881 unsigned int rd = bits (insn, 8, 10);
6882 unsigned int imm8 = bits (insn, 0, 7);
6884 if (debug_displaced)
6885 fprintf_unfiltered (gdb_stdlog,
6886 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6889 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
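/* Copy 32-bit Thumb ADR (ADR.W), rewriting it as "ADD/SUB Rd, Rd, #imm" with
   Rd preloaded with the original PC.  */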
6893 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6894 uint16_t insn2, struct regcache *regs,
6895 struct displaced_step_closure *dsc)
6897 unsigned int rd = bits (insn2, 8, 11);
6898 /* The immediate field has the same encoding in ADR, ADD and SUB, so we simply
6899 extract the raw immediate encoding rather than computing the immediate value.
6900 When generating the ADD or SUB instruction, the immediate can then simply be
6901 ORed into the encoding. */
6902 unsigned int imm_3_8 = insn2 & 0x70ff;
6903 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6905 if (debug_displaced)
6906 fprintf_unfiltered (gdb_stdlog,
6907 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6908 rd, imm_i, imm_3_8, insn1, insn2);
6910 if (bit (insn1, 7)) /* Encoding T2 */
6912 /* Encoding T3: SUB Rd, Rd, #imm */
6913 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6914 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6916 else /* Encoding T3 */
6918 /* Encoding T3: ADD Rd, Rd, #imm */
6919 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6920 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6924 install_pc_relative (gdbarch, regs, dsc, rd);
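/* Copy 16-bit Thumb LDR (literal), i.e. a PC-relative load, using the rewrite
   scheme described in the comment below.  */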
6930 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
6931 struct regcache *regs,
6932 struct displaced_step_closure *dsc)
6934 unsigned int rt = bits (insn1, 8, 10);
6936 int imm8 = (bits (insn1, 0, 7) << 2);
6937 CORE_ADDR from = dsc->insn_addr;
6943 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6945 Insn: LDR R0, [R2, R3];
6946 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6948 if (debug_displaced)
6949 fprintf_unfiltered (gdb_stdlog,
6950 "displaced: copying thumb ldr r%d [pc #%d]\n"
6953 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6954 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6955 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6956 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6957 /* The assembler calculates the required value of the offset from the
6958 Align(PC,4) value of this instruction to the label. */
6959 pc = pc & 0xfffffffc;
6961 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6962 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6965 dsc->u.ldst.xfersize = 4;
6967 dsc->u.ldst.immed = 0;
6968 dsc->u.ldst.writeback = 0;
6969 dsc->u.ldst.restore_r4 = 0;
6971 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
6973 dsc->cleanup = &cleanup_load;
6978 /* Copy Thumb cbnz/cbz instruction. */
6981 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6982 struct regcache *regs,
6983 struct displaced_step_closure *dsc)
6985 int non_zero = bit (insn1, 11);
6986 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6987 CORE_ADDR from = dsc->insn_addr;
6988 int rn = bits (insn1, 0, 2);
6989 int rn_val = displaced_read_reg (regs, dsc, rn);
6991 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6992 /* CBNZ and CBZ do not affect the condition flags. If the condition is true,
6993 set the condition to INST_AL so that cleanup_branch knows the branch is
6994 taken; if it is false, leave it as-is and cleanup_branch will do nothing. */
6995 if (dsc->u.branch.cond)
6997 dsc->u.branch.cond = INST_AL;
6998 dsc->u.branch.dest = from + 4 + imm5;
7001 dsc->u.branch.dest = from + 2;
7003 dsc->u.branch.link = 0;
7004 dsc->u.branch.exchange = 0;
7006 if (debug_displaced)
7007 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7008 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7009 rn, rn_val, insn1, dsc->u.branch.dest);
7011 dsc->modinsn[0] = THUMB_NOP;
7013 dsc->cleanup = &cleanup_branch;
7017 /* Copy Table Branch Byte/Halfword (TBB/TBH). */
7019 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7020 uint16_t insn2, struct regcache *regs,
7021 struct displaced_step_closure *dsc)
7023 ULONGEST rn_val, rm_val;
7024 int is_tbh = bit (insn2, 4);
7025 CORE_ADDR halfwords = 0;
7026 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7028 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7029 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7035 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7036 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7042 target_read_memory (rn_val + rm_val, buf, 1);
7043 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7046 if (debug_displaced)
7047 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7048 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7049 (unsigned int) rn_val, (unsigned int) rm_val,
7050 (unsigned int) halfwords);
7052 dsc->u.branch.cond = INST_AL;
7053 dsc->u.branch.link = 0;
7054 dsc->u.branch.exchange = 0;
7055 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7057 dsc->cleanup = &cleanup_branch;
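/* Cleanup for the modified 16-bit "POP {r0-r7, PC}" sequence used when the
   register list is full: PC <- r7, r7 <- r8, r8 <- saved r8.  */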
7063 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7064 struct displaced_step_closure *dsc)
7067 int val = displaced_read_reg (regs, dsc, 7);
7068 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7071 val = displaced_read_reg (regs, dsc, 8);
7072 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7075 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7080 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
7081 struct regcache *regs,
7082 struct displaced_step_closure *dsc)
7084 dsc->u.block.regmask = insn1 & 0x00ff;
7086 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7089 (1) register list is full, that is, r0-r7 are used.
7090 Prepare: tmp[0] <- r8
7092 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7093 MOV r8, r7; Move value of r7 to r8;
7094 POP {r7}; Store PC value into r7.
7096 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7098 (2) register list is not full, supposing there are N registers in
7099 register list (except PC, 0 <= N <= 7).
7100 Prepare: for each i, 0 - N, tmp[i] <- ri.
7102 POP {r0, r1, ...., rN};
7104 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7105 from tmp[] properly.
7107 if (debug_displaced)
7108 fprintf_unfiltered (gdb_stdlog,
7109 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7110 dsc->u.block.regmask, insn1);
7112 if (dsc->u.block.regmask == 0xff)
7114 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7116 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7117 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7118 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7121 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7125 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7126 unsigned int new_regmask, bit = 1;
7127 unsigned int to = 0, from = 0, i, new_rn;
7129 for (i = 0; i < num_in_list + 1; i++)
7130 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7132 new_regmask = (1 << (num_in_list + 1)) - 1;
7134 if (debug_displaced)
7135 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7136 "{..., pc}: original reg list %.4x,"
7137 " modified list %.4x\n"),
7138 (int) dsc->u.block.regmask, new_regmask);
7140 dsc->u.block.regmask |= 0x8000;
7141 dsc->u.block.writeback = 0;
7142 dsc->u.block.cond = INST_AL;
7144 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7146 dsc->cleanup = &cleanup_block_load_pc;
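/* Decode a 16-bit Thumb instruction and copy it into the displaced-stepping
   scratch space, dispatching on the top four opcode bits.  */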
7153 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7154 struct regcache *regs,
7155 struct displaced_step_closure *dsc)
7157 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7158 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7161 /* 16-bit thumb instructions. */
7162 switch (op_bit_12_15)
7164 /* Shift (immediate), add, subtract, move and compare. */
7165 case 0: case 1: case 2: case 3:
7166 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7167 "shift/add/sub/mov/cmp",
7171 switch (op_bit_10_11)
7173 case 0: /* Data-processing */
7174 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7178 case 1: /* Special data instructions and branch and exchange. */
7180 unsigned short op = bits (insn1, 7, 9);
7181 if (op == 6 || op == 7) /* BX or BLX */
7182 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7183 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7184 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7186 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7190 default: /* LDR (literal) */
7191 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7194 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7195 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7198 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7199 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7200 else /* Generate SP-relative address */
7201 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7203 case 11: /* Misc 16-bit instructions */
7205 switch (bits (insn1, 8, 11))
7207 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7208 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7210 case 12: case 13: /* POP */
7211 if (bit (insn1, 8)) /* PC is in register list. */
7212 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7214 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7216 case 15: /* If-Then, and hints */
7217 if (bits (insn1, 0, 3))
7218 /* If-Then makes up to four following instructions conditional.
7219 The IT instruction itself is not conditional, so handle it as a
7220 common unmodified instruction. */
7221 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7224 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7227 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7232 if (op_bit_10_11 < 2) /* Store multiple registers */
7233 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7234 else /* Load multiple registers */
7235 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7237 case 13: /* Conditional branch and supervisor call */
7238 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7239 err = thumb_copy_b (gdbarch, insn1, dsc);
7241 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7243 case 14: /* Unconditional branch */
7244 err = thumb_copy_b (gdbarch, insn1, dsc);
7251 internal_error (__FILE__, __LINE__,
7252 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
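/* Decode 32-bit Thumb load instructions and memory hints (PLD/PLI and
   friends) and dispatch to the appropriate copy routine.  */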
7256 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7257 uint16_t insn1, uint16_t insn2,
7258 struct regcache *regs,
7259 struct displaced_step_closure *dsc)
7261 int rt = bits (insn2, 12, 15);
7262 int rn = bits (insn1, 0, 3);
7263 int op1 = bits (insn1, 7, 8);
7266 switch (bits (insn1, 5, 6))
7268 case 0: /* Load byte and memory hints */
7269 if (rt == 0xf) /* PLD/PLI */
7272 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7273 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7275 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7280 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7281 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7284 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7285 "ldrb{reg, immediate}/ldrbt",
7290 case 1: /* Load halfword and memory hints. */
7291 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7292 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7293 "pld/unalloc memhint", dsc);
7297 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7300 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7304 case 2: /* Load word */
7306 int insn2_bit_8_11 = bits (insn2, 8, 11);
7309 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7310 else if (op1 == 0x1) /* Encoding T3 */
7311 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7313 else /* op1 == 0x0 */
7315 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7316 /* LDR (immediate) */
7317 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7318 dsc, bit (insn2, 8), 1);
7319 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7320 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7323 /* LDR (register) */
7324 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7330 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
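/* Decode a 32-bit Thumb instruction and copy it into the displaced-stepping
   scratch space.  */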
7337 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7338 uint16_t insn2, struct regcache *regs,
7339 struct displaced_step_closure *dsc)
7342 unsigned short op = bit (insn2, 15);
7343 unsigned int op1 = bits (insn1, 11, 12);
7349 switch (bits (insn1, 9, 10))
7354 /* Load/store {dual, exclusive}, table branch. */
7355 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7356 && bits (insn2, 5, 7) == 0)
7357 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7360 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
7362 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7363 "load/store dual/ex", dsc);
7365 else /* load/store multiple */
7367 switch (bits (insn1, 7, 8))
7369 case 0: case 3: /* SRS, RFE */
7370 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7373 case 1: case 2: /* LDM/STM/PUSH/POP */
7374 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7381 /* Data-processing (shift register). */
7382 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7385 default: /* Coprocessor instructions. */
7386 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7391 case 2: /* op1 = 2 */
7392 if (op) /* Branch and misc control. */
7394 if (bit (insn2, 14) /* BLX/BL */
7395 || bit (insn2, 12) /* Unconditional branch */
7396 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7397 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7399 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7404 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7406 int op = bits (insn1, 4, 8);
7407 int rn = bits (insn1, 0, 3);
7408 if ((op == 0 || op == 0xa) && rn == 0xf)
7409 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7412 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7415 else /* Data processing (modified immediate) */
7416 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7420 case 3: /* op1 = 3 */
7421 switch (bits (insn1, 9, 10))
7425 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7427 else /* NEON Load/Store and Store single data item */
7428 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7429 "neon elt/struct load/store",
7432 case 1: /* op1 = 3, bits (9, 10) == 1 */
7433 switch (bits (insn1, 7, 8))
7435 case 0: case 1: /* Data processing (register) */
7436 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7439 case 2: /* Multiply and absolute difference */
7440 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7441 "mul/mua/diff", dsc);
7443 case 3: /* Long multiply and divide */
7444 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7449 default: /* Coprocessor instructions */
7450 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7459 internal_error (__FILE__, __LINE__,
7460 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
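/* Read the Thumb instruction at FROM and copy it, as either a 16-bit or a
   32-bit instruction, into the displaced-stepping scratch space.  */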
7465 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7466 CORE_ADDR to, struct regcache *regs,
7467 struct displaced_step_closure *dsc)
7469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7471 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7473 if (debug_displaced)
7474 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7475 "at %.8lx\n", insn1, (unsigned long) from);
7478 dsc->insn_size = thumb_insn_size (insn1);
7479 if (thumb_insn_size (insn1) == 4)
7482 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7483 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7486 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
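/* Set up DSC and copy the instruction at FROM for displaced stepping,
   handing Thumb-mode instructions off to thumb_process_displaced_insn and
   decoding ARM instructions here.  */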
7490 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7491 CORE_ADDR to, struct regcache *regs,
7492 struct displaced_step_closure *dsc)
7495 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7498 /* Most displaced instructions use a 1-instruction scratch space, so set this
7499 here and override below if/when necessary. */
7501 dsc->insn_addr = from;
7502 dsc->scratch_base = to;
7503 dsc->cleanup = NULL;
7504 dsc->wrote_to_pc = 0;
7506 if (!displaced_in_arm_mode (regs))
7507 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
7511 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7512 if (debug_displaced)
7513 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7514 "at %.8lx\n", (unsigned long) insn,
7515 (unsigned long) from);
7517 if ((insn & 0xf0000000) == 0xf0000000)
7518 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7519 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7521 case 0x0: case 0x1: case 0x2: case 0x3:
7522 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7525 case 0x4: case 0x5: case 0x6:
7526 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7530 err = arm_decode_media (gdbarch, insn, dsc);
7533 case 0x8: case 0x9: case 0xa: case 0xb:
7534 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7537 case 0xc: case 0xd: case 0xe: case 0xf:
7538 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
7543 internal_error (__FILE__, __LINE__,
7544 _("arm_process_displaced_insn: Instruction decode error"));
7547 /* Actually set up the scratch space for a displaced instruction. */
7550 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7551 CORE_ADDR to, struct displaced_step_closure *dsc)
7553 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7554 unsigned int i, len, offset;
7555 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7556 int size = dsc->is_thumb ? 2 : 4;
7557 const gdb_byte *bkp_insn;
7560 /* Poke modified instruction(s). */
7561 for (i = 0; i < dsc->numinsns; i++)
7563 if (debug_displaced)
7565 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7567 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7570 fprintf_unfiltered (gdb_stdlog, "%.4x",
7571 (unsigned short)dsc->modinsn[i]);
7573 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7574 (unsigned long) to + offset);
7577 write_memory_unsigned_integer (to + offset, size,
7578 byte_order_for_code,
7583 /* Choose the correct breakpoint instruction. */
7586 bkp_insn = tdep->thumb_breakpoint;
7587 len = tdep->thumb_breakpoint_size;
7591 bkp_insn = tdep->arm_breakpoint;
7592 len = tdep->arm_breakpoint_size;
7595 /* Put breakpoint afterwards. */
7596 write_memory (to + offset, bkp_insn, len);
7598 if (debug_displaced)
7599 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7600 paddress (gdbarch, from), paddress (gdbarch, to));
7603 /* Entry point for copying an instruction into scratch space for displaced stepping. */
7606 struct displaced_step_closure *
7607 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
7608 CORE_ADDR from, CORE_ADDR to,
7609 struct regcache *regs)
7611 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
7613 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
7614 arm_displaced_init_closure (gdbarch, from, to, dsc);
7619 /* Entry point for cleaning things up after a displaced instruction has been executed. */
7623 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7624 struct displaced_step_closure *dsc,
7625 CORE_ADDR from, CORE_ADDR to,
7626 struct regcache *regs)
7629 dsc->cleanup (gdbarch, regs, dsc);
7631 if (!dsc->wrote_to_pc)
7632 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7633 dsc->insn_addr + dsc->insn_size);
7637 #include "bfd-in2.h"
7638 #include "libcoff.h"
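/* Disassemble one instruction at MEMADDR, selecting Thumb or ARM decoding
   based on arm_pc_is_thumb.  */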
7641 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7643 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7645 if (arm_pc_is_thumb (gdbarch, memaddr))
7647 static asymbol *asym;
7648 static combined_entry_type ce;
7649 static struct coff_symbol_struct csym;
7650 static struct bfd fake_bfd;
7651 static bfd_target fake_target;
7653 if (csym.native == NULL)
7655 /* Create a fake symbol vector containing a Thumb symbol.
7656 This is solely so that the code in print_insn_little_arm()
7657 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7658 the presence of a Thumb symbol and switch to decoding
7659 Thumb instructions. */
7661 fake_target.flavour = bfd_target_coff_flavour;
7662 fake_bfd.xvec = &fake_target;
7663 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7665 csym.symbol.the_bfd = &fake_bfd;
7666 csym.symbol.name = "fake";
7667 asym = (asymbol *) & csym;
7670 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7671 info->symbols = &asym;
7674 info->symbols = NULL;
7676 if (info->endian == BFD_ENDIAN_BIG)
7677 return print_insn_big_arm (memaddr, info);
7679 return print_insn_little_arm (memaddr, info);
7682 /* The following define instruction sequences that will cause ARM
7683 CPUs to take an undefined instruction trap. These are used to
7684 signal a breakpoint to GDB.
7686 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7687 modes. A different instruction is required for each mode. The ARM
7688 CPUs can also be big or little endian. Thus four different
7689 instructions are needed to support all cases.
7691 Note: ARMv4 defines several new instructions that will take the
7692 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7693 not in fact add the new instructions. The new undefined
7694 instructions in ARMv4 are all instructions that had no defined
7695 behaviour in earlier chips. There is no guarantee that they will
7696 raise an exception; they may instead be treated as NOPs. In practice,
7697 it may only be safe to rely on instructions matching:
7699 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7700 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7701 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7703 Even this may only be true if the condition predicate is true. The
7704 following use a condition predicate of ALWAYS so it is always TRUE.
7706 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7707 and NetBSD all use a software interrupt rather than an undefined
7708 instruction to force a trap. This can be handled by the
7709 abi-specific code during establishment of the gdbarch vector. */
7711 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7712 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7713 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7714 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7716 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7717 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7718 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7719 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7721 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7722 the program counter value to determine whether a 16-bit or 32-bit
7723 breakpoint should be used. It returns a pointer to a string of
7724 bytes that encode a breakpoint instruction, stores the length of
7725 the string to *lenptr, and adjusts the program counter (if
7726 necessary) to point to the actual memory location where the
7727 breakpoint should be inserted. */
7729 static const unsigned char *
7730 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7732 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7733 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7735 if (arm_pc_is_thumb (gdbarch, *pcptr))
7737 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7739 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7740 check whether we are replacing a 32-bit instruction. */
7741 if (tdep->thumb2_breakpoint != NULL)
7744 if (target_read_memory (*pcptr, buf, 2) == 0)
7746 unsigned short inst1;
7747 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7748 if (thumb_insn_size (inst1) == 4)
7750 *lenptr = tdep->thumb2_breakpoint_size;
7751 return tdep->thumb2_breakpoint;
7756 *lenptr = tdep->thumb_breakpoint_size;
7757 return tdep->thumb_breakpoint;
7761 *lenptr = tdep->arm_breakpoint_size;
7762 return tdep->arm_breakpoint;
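/* Determine the breakpoint kind to report to a remote stub for a breakpoint
   at *PCPTR, distinguishing 32-bit Thumb-2 breakpoints from 32-bit ARM
   breakpoints.  */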
7767 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7770 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7772 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7773 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7774 that this is not confused with a 32-bit ARM breakpoint. */
7778 /* Extract from an array REGBUF containing the (raw) register state a
7779 function return value of type TYPE, and copy that, in virtual
7780 format, into VALBUF. */
7783 arm_extract_return_value (struct type *type, struct regcache *regs,
7786 struct gdbarch *gdbarch = get_regcache_arch (regs);
7787 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7789 if (TYPE_CODE_FLT == TYPE_CODE (type))
7791 switch (gdbarch_tdep (gdbarch)->fp_model)
7795 /* The value is in register F0 in internal format. We need to
7796 extract the raw value and then convert it to the desired virtual format. */
7798 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7800 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7801 convert_from_extended (floatformat_from_type (type), tmpbuf,
7802 valbuf, gdbarch_byte_order (gdbarch));
7806 case ARM_FLOAT_SOFT_FPA:
7807 case ARM_FLOAT_SOFT_VFP:
7808 /* ARM_FLOAT_VFP can arise if this is a variadic function, so we are
7809 not using the VFP ABI code. */
7811 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7812 if (TYPE_LENGTH (type) > 4)
7813 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7814 valbuf + INT_REGISTER_SIZE);
7818 internal_error (__FILE__, __LINE__,
7819 _("arm_extract_return_value: "
7820 "Floating point model not supported"));
7824 else if (TYPE_CODE (type) == TYPE_CODE_INT
7825 || TYPE_CODE (type) == TYPE_CODE_CHAR
7826 || TYPE_CODE (type) == TYPE_CODE_BOOL
7827 || TYPE_CODE (type) == TYPE_CODE_PTR
7828 || TYPE_CODE (type) == TYPE_CODE_REF
7829 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7831 /* If the type is a plain integer, then the access is
7832 straightforward. Otherwise we have to play around a bit more. */
7834 int len = TYPE_LENGTH (type);
7835 int regno = ARM_A1_REGNUM;
7840 /* By using store_unsigned_integer we avoid having to do
7841 anything special for small big-endian values. */
7842 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7843 store_unsigned_integer (valbuf,
7844 (len > INT_REGISTER_SIZE
7845 ? INT_REGISTER_SIZE : len),
7847 len -= INT_REGISTER_SIZE;
7848 valbuf += INT_REGISTER_SIZE;
7853 /* For a structure or union the behaviour is as if the value had
7854 been stored to word-aligned memory and then loaded into
7855 registers with 32-bit load instruction(s). */
7856 int len = TYPE_LENGTH (type);
7857 int regno = ARM_A1_REGNUM;
7858 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7862 regcache_cooked_read (regs, regno++, tmpbuf);
7863 memcpy (valbuf, tmpbuf,
7864 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7865 len -= INT_REGISTER_SIZE;
7866 valbuf += INT_REGISTER_SIZE;
7872 /* Will a function return an aggregate type in memory or in a
7873 register? Return 0 if an aggregate type can be returned in a
7874 register, 1 if it must be returned in memory. */
7877 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7879 enum type_code code;
7881 type = check_typedef (type);
7883 /* Simple, non-aggregate types (ie not including vectors and
7884 complex) are always returned in a register (or registers). */
7885 code = TYPE_CODE (type);
7886 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7887 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7890 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7892 /* Vector values should be returned using ARM registers if they
7893 are not over 16 bytes. */
7894 return (TYPE_LENGTH (type) > 16);
7897 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7899 /* The AAPCS says all aggregates not larger than a word are returned in a register. */
7901 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7910 /* All aggregate types that won't fit in a register must be returned
7912 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7915 /* In the ARM ABI, "integer" like aggregate types are returned in
7916 registers. For an aggregate type to be integer like, its size
7917 must be less than or equal to INT_REGISTER_SIZE and the
7918 offset of each addressable subfield must be zero. Note that bit
7919 fields are not addressable, and all addressable subfields of
7920 unions always start at offset zero.
7922 This function is based on the behaviour of GCC 2.95.1.
7923 See: gcc/arm.c: arm_return_in_memory() for details.
7925 Note: All versions of GCC before GCC 2.95.2 do not set up the
7926 parameters correctly for a function returning the following
7927 structure: struct { float f;}; This should be returned in memory,
7928 not a register. Richard Earnshaw sent me a patch, but I do not
7929 know of any way to detect if a function like the above has been
7930 compiled with the correct calling convention. */
7932 /* Assume all other aggregate types can be returned in a register.
7933 Run a check for structures, unions and arrays. */
7936 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7939 /* Need to check if this struct/union is "integer" like. For
7940 this to be true, its size must be less than or equal to
7941 INT_REGISTER_SIZE and the offset of each addressable
7942 subfield must be zero. Note that bit fields are not
7943 addressable, and unions always start at offset zero. If any
7944 of the subfields is a floating point type, the struct/union
7945 cannot be an integer type. */
7947 /* For each field in the object, check:
7948 1) Is it FP? --> yes, nRc = 1;
7949 2) Is it addressable (bitpos != 0) and
7950 not packed (bitsize == 0)?
7954 for (i = 0; i < TYPE_NFIELDS (type); i++)
7956 enum type_code field_type_code;
7959 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7962 /* Is it a floating point type field? */
7963 if (field_type_code == TYPE_CODE_FLT)
7969 /* If bitpos != 0, then we have to care about it. */
7970 if (TYPE_FIELD_BITPOS (type, i) != 0)
7972 /* Bitfields are not addressable. If the field bitsize is
7973 zero, then the field is not packed. Hence it cannot be
7974 a bitfield or any other packed type. */
7975 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7988 /* Write into appropriate registers a function return value of type
7989 TYPE, given in virtual format. */
7992 arm_store_return_value (struct type *type, struct regcache *regs,
7993 const gdb_byte *valbuf)
7995 struct gdbarch *gdbarch = get_regcache_arch (regs);
7996 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7998 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8000 gdb_byte buf[MAX_REGISTER_SIZE];
8002 switch (gdbarch_tdep (gdbarch)->fp_model)
8006 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8007 gdbarch_byte_order (gdbarch));
8008 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8011 case ARM_FLOAT_SOFT_FPA:
8012 case ARM_FLOAT_SOFT_VFP:
8013 /* ARM_FLOAT_VFP can arise if this is a variadic function, so we are
8014 not using the VFP ABI code. */
8016 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8017 if (TYPE_LENGTH (type) > 4)
8018 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8019 valbuf + INT_REGISTER_SIZE);
8023 internal_error (__FILE__, __LINE__,
8024 _("arm_store_return_value: Floating "
8025 "point model not supported"));
8029 else if (TYPE_CODE (type) == TYPE_CODE_INT
8030 || TYPE_CODE (type) == TYPE_CODE_CHAR
8031 || TYPE_CODE (type) == TYPE_CODE_BOOL
8032 || TYPE_CODE (type) == TYPE_CODE_PTR
8033 || TYPE_CODE (type) == TYPE_CODE_REF
8034 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8036 if (TYPE_LENGTH (type) <= 4)
8038 /* Values of one word or less are zero/sign-extended and returned in a register. */
8040 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8041 LONGEST val = unpack_long (type, valbuf);
8043 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8044 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8048 /* Integral values greater than one word are stored in consecutive
8049 registers starting with r0. This will always be a multiple of
8050 the register size. */
8051 int len = TYPE_LENGTH (type);
8052 int regno = ARM_A1_REGNUM;
8056 regcache_cooked_write (regs, regno++, valbuf);
8057 len -= INT_REGISTER_SIZE;
8058 valbuf += INT_REGISTER_SIZE;
8064 /* For a structure or union the behaviour is as if the value had
8065 been stored to word-aligned memory and then loaded into
8066 registers with 32-bit load instruction(s). */
8067 int len = TYPE_LENGTH (type);
8068 int regno = ARM_A1_REGNUM;
8069 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8073 memcpy (tmpbuf, valbuf,
8074 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8075 regcache_cooked_write (regs, regno++, tmpbuf);
8076 len -= INT_REGISTER_SIZE;
8077 valbuf += INT_REGISTER_SIZE;
8083 /* Handle function return values. */
8085 static enum return_value_convention
8086 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8087 struct type *valtype, struct regcache *regcache,
8088 gdb_byte *readbuf, const gdb_byte *writebuf)
8090 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8091 struct type *func_type = function ? value_type (function) : NULL;
8092 enum arm_vfp_cprc_base_type vfp_base_type;
8095 if (arm_vfp_abi_for_function (gdbarch, func_type)
8096 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8098 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8099 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8101 for (i = 0; i < vfp_base_count; i++)
8103 if (reg_char == 'q')
8106 arm_neon_quad_write (gdbarch, regcache, i,
8107 writebuf + i * unit_length);
8110 arm_neon_quad_read (gdbarch, regcache, i,
8111 readbuf + i * unit_length);
8118 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8119 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8122 regcache_cooked_write (regcache, regnum,
8123 writebuf + i * unit_length);
8125 regcache_cooked_read (regcache, regnum,
8126 readbuf + i * unit_length);
8129 return RETURN_VALUE_REGISTER_CONVENTION;
8132 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8133 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8134 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8136 if (tdep->struct_return == pcc_struct_return
8137 || arm_return_in_memory (gdbarch, valtype))
8138 return RETURN_VALUE_STRUCT_CONVENTION;
8140 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8142 if (arm_return_in_memory (gdbarch, valtype))
8143 return RETURN_VALUE_STRUCT_CONVENTION;
8147 arm_store_return_value (valtype, regcache, writebuf);
8150 arm_extract_return_value (valtype, regcache, readbuf);
8152 return RETURN_VALUE_REGISTER_CONVENTION;
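/* Extract the longjmp target PC from the jmp_buf whose address is passed in
   r0, using the tdep-supplied offset of the saved PC.  */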
8157 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8159 struct gdbarch *gdbarch = get_frame_arch (frame);
8160 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8161 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8163 gdb_byte buf[INT_REGISTER_SIZE];
8165 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8167 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8171 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8175 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8176 return the target PC. Otherwise return 0. */
8179 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8183 CORE_ADDR start_addr;
8185 /* Find the starting address and name of the function containing the PC. */
8186 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8188 /* Trampoline 'bx reg' doesn't belong to any function. Do the check here. */
8190 start_addr = arm_skip_bx_reg (frame, pc);
8191 if (start_addr != 0)
8197 /* If PC is in a Thumb call or return stub, return the address of the
8198 target PC, which is in a register. The thunk functions are called
8199 _call_via_xx, where xx is the register name. The possible names
8200 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8201 functions, named __ARM_call_via_r[0-7]. */
8202 if (startswith (name, "_call_via_")
8203 || startswith (name, "__ARM_call_via_"))
8205 /* Use the name suffix to determine which register contains the target PC. */
8207 static char *table[15] =
8208 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8209 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8212 int offset = strlen (name) - 2;
8214 for (regno = 0; regno <= 14; regno++)
8215 if (strcmp (&name[offset], table[regno]) == 0)
8216 return get_frame_register_unsigned (frame, regno);
8219 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8220 non-interworking calls to foo. We could decode the stubs
8221 to find the target but it's easier to use the symbol table. */
8222 namelen = strlen (name);
8223 if (name[0] == '_' && name[1] == '_'
8224 && ((namelen > 2 + strlen ("_from_thumb")
8225 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8226 || (namelen > 2 + strlen ("_from_arm")
8227 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8230 int target_len = namelen - 2;
8231 struct bound_minimal_symbol minsym;
8232 struct objfile *objfile;
8233 struct obj_section *sec;
8235 if (name[namelen - 1] == 'b')
8236 target_len -= strlen ("_from_thumb");
8238 target_len -= strlen ("_from_arm");
8240 target_name = (char *) alloca (target_len + 1);
8241 memcpy (target_name, name + 2, target_len);
8242 target_name[target_len] = '\0';
8244 sec = find_pc_section (pc);
8245 objfile = (sec == NULL) ? NULL : sec->objfile;
8246 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8247 if (minsym.minsym != NULL)
8248 return BMSYMBOL_VALUE_ADDRESS (minsym);
8253 return 0; /* Not a stub. */
8257 set_arm_command (char *args, int from_tty)
8259 printf_unfiltered (_("\
8260 \"set arm\" must be followed by an appropriate subcommand.\n"));
8261 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8265 show_arm_command (char *args, int from_tty)
8267 cmd_show_list (showarmcmdlist, from_tty, "");
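/* Re-select the target architecture after one of the "set arm ..." settings
   has changed.  */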
8271 arm_update_current_architecture (void)
8273 struct gdbarch_info info;
8275 /* If the current architecture is not ARM, we have nothing to do. */
8276 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8279 /* Update the architecture. */
8280 gdbarch_info_init (&info);
8282 if (!gdbarch_update_p (info))
8283 internal_error (__FILE__, __LINE__, _("could not update architecture"));
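/* Handle a change to the ARM floating-point model setting: map the chosen
   string onto an arm_float_model value and update the current architecture.  */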
8287 set_fp_model_sfunc (char *args, int from_tty,
8288 struct cmd_list_element *c)
8292 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8293 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8295 arm_fp_model = (enum arm_float_model) fp_model;
8299 if (fp_model == ARM_FLOAT_LAST)
8300 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8303 arm_update_current_architecture ();
8307 show_fp_model (struct ui_file *file, int from_tty,
8308 struct cmd_list_element *c, const char *value)
8310 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8312 if (arm_fp_model == ARM_FLOAT_AUTO
8313 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8314 fprintf_filtered (file, _("\
8315 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8316 fp_model_strings[tdep->fp_model]);
8318 fprintf_filtered (file, _("\
8319 The current ARM floating point model is \"%s\".\n"),
8320 fp_model_strings[arm_fp_model]);
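/* Handle a change to the ARM ABI setting: map the chosen string onto an
   arm_abi_kind value and update the current architecture.  */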
8324 arm_set_abi (char *args, int from_tty,
8325 struct cmd_list_element *c)
8329 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8330 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8332 arm_abi_global = (enum arm_abi_kind) arm_abi;
8336 if (arm_abi == ARM_ABI_LAST)
8337 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8340 arm_update_current_architecture ();
8344 arm_show_abi (struct ui_file *file, int from_tty,
8345 struct cmd_list_element *c, const char *value)
8347 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8349 if (arm_abi_global == ARM_ABI_AUTO
8350 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8351 fprintf_filtered (file, _("\
8352 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8353 arm_abi_strings[tdep->arm_abi]);
8355 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8360 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8361 struct cmd_list_element *c, const char *value)
8363 fprintf_filtered (file,
8364 _("The current execution mode assumed "
8365 "(when symbols are unavailable) is \"%s\".\n"),
8366 arm_fallback_mode_string);
8370 arm_show_force_mode (struct ui_file *file, int from_tty,
8371 struct cmd_list_element *c, const char *value)
8373 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8375 fprintf_filtered (file,
8376 _("The current execution mode assumed "
8377 "(even when symbols are available) is \"%s\".\n"),
8378 arm_force_mode_string);
8381 /* If the user changes the register disassembly style used for info
8382 register and other commands, we have to also switch the style used
8383 in opcodes for disassembly output. This function is run by the "set
8384 arm disassembly" command, and does that. */
8387 set_disassembly_style_sfunc (char *args, int from_tty,
8388 struct cmd_list_element *c)
8390 set_disassembly_style ();
8393 /* Return the ARM register name corresponding to register I. */
8395 arm_register_name (struct gdbarch *gdbarch, int i)
8397 const int num_regs = gdbarch_num_regs (gdbarch);
8399 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8400 && i >= num_regs && i < num_regs + 32)
8402 static const char *const vfp_pseudo_names[] = {
8403 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8404 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8405 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8406 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8409 return vfp_pseudo_names[i - num_regs];
8412 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8413 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8415 static const char *const neon_pseudo_names[] = {
8416 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8417 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8420 return neon_pseudo_names[i - num_regs - 32];
8423 if (i >= ARRAY_SIZE (arm_register_names))
8424 /* These registers are only supported on targets which supply
8425 an XML description. */
8428 return arm_register_names[i];
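/* A brief sketch of the pseudo-register numbering assumed above: raw
   registers occupy [0, num_regs), the VFP single-precision pseudos
   s0-s31 occupy [num_regs, num_regs + 32), and the NEON quad pseudos
   q0-q15 occupy [num_regs + 32, num_regs + 48).  For example, assuming
   the usual 26 raw registers (r0-r15, f0-f7, fps, cpsr), "s5" would be
   register 31 and "q2" register 60.  */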
8432 set_disassembly_style (void)
8436 /* Find the style that the user wants. */
8437 for (current = 0; current < num_disassembly_options; current++)
8438 if (disassembly_style == valid_disassembly_styles[current])
8440 gdb_assert (current < num_disassembly_options);
8442 /* Synchronize the disassembler. */
8443 set_arm_regname_option (current);
8446 /* Test whether the coff symbol specific value corresponds to a Thumb
8450 coff_sym_is_thumb (int val)
8452 return (val == C_THUMBEXT
8453 || val == C_THUMBSTAT
8454 || val == C_THUMBEXTFUNC
8455 || val == C_THUMBSTATFUNC
8456 || val == C_THUMBLABEL);
8459 /* arm_coff_make_msymbol_special()
8460 arm_elf_make_msymbol_special()
8462 These functions test whether the COFF or ELF symbol corresponds to
8463 an address in thumb code, and set a "special" bit in a minimal
8464 symbol to indicate that it does. */
8467 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8469 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
8470 == ST_BRANCH_TO_THUMB)
8471 MSYMBOL_SET_SPECIAL (msym);
8475 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8477 if (coff_sym_is_thumb (val))
8478 MSYMBOL_SET_SPECIAL (msym);
8482 arm_objfile_data_free (struct objfile *objfile, void *arg)
8484 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8487 for (i = 0; i < objfile->obfd->section_count; i++)
8488 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8492 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8495 const char *name = bfd_asymbol_name (sym);
8496 struct arm_per_objfile *data;
8497 VEC(arm_mapping_symbol_s) **map_p;
8498 struct arm_mapping_symbol new_map_sym;
8500 gdb_assert (name[0] == '$');
8501 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8504 data = (struct arm_per_objfile *) objfile_data (objfile,
8505 arm_objfile_data_key);
8508 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8509 struct arm_per_objfile);
8510 set_objfile_data (objfile, arm_objfile_data_key, data);
8511 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8512 objfile->obfd->section_count,
8513 VEC(arm_mapping_symbol_s) *);
8515 map_p = &data->section_maps[bfd_get_section (sym)->index];
8517 new_map_sym.value = sym->value;
8518 new_map_sym.type = name[1];
8520 /* Assume that most mapping symbols appear in order of increasing
8521 value. If they were randomly distributed, it would be faster to
8522 always push here and then sort at first use. */
8523 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8525 struct arm_mapping_symbol *prev_map_sym;
8527 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8528 if (prev_map_sym->value >= sym->value)
8531 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8532 arm_compare_mapping_symbols);
8533 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8538 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
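/* For reference, the mapping symbols recorded above follow the ARM ELF
   convention: "$a" marks the start of a run of ARM code, "$t" a run of
   Thumb code, and "$d" a run of literal data.  Keeping each per-section
   vector sorted by value is what lets later lookups find the symbol
   covering a given address cheaply with a lower-bound search.  */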
8542 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8544 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8545 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8547 /* If necessary, set the T bit. */
8550 ULONGEST val, t_bit;
8551 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8552 t_bit = arm_psr_thumb_bit (gdbarch);
8553 if (arm_pc_is_thumb (gdbarch, pc))
8554 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8557 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8562 /* Read the contents of a NEON quad register, by reading from two
8563 double registers. This is used to implement the quad pseudo
8564 registers, and for argument passing in case the quad registers are
8565 missing; vectors are passed in quad registers when using the VFP
8566 ABI, even if a NEON unit is not present. REGNUM is the index of
8567 the quad register, in [0, 15]. */
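/* Concretely, in the layout assumed below, q<N> aliases the double
   registers d<2N> (least significant half) and d<2N+1> (most
   significant half).  Reading q1 on a little-endian target therefore
   copies d2 into bytes 0-7 of BUF and d3 into bytes 8-15; on a
   big-endian target the two halves land in the opposite order.  */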
8569 static enum register_status
8570 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8571 int regnum, gdb_byte *buf)
8574 gdb_byte reg_buf[8];
8575 int offset, double_regnum;
8576 enum register_status status;
8578 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8579 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8582 /* d0 is always the least significant half of q0. */
8583 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8588 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8589 if (status != REG_VALID)
8591 memcpy (buf + offset, reg_buf, 8);
8593 offset = 8 - offset;
8594 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8595 if (status != REG_VALID)
8597 memcpy (buf + offset, reg_buf, 8);
8602 static enum register_status
8603 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8604 int regnum, gdb_byte *buf)
8606 const int num_regs = gdbarch_num_regs (gdbarch);
8608 gdb_byte reg_buf[8];
8609 int offset, double_regnum;
8611 gdb_assert (regnum >= num_regs);
8614 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8615 /* Quad-precision register. */
8616 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8619 enum register_status status;
8621 /* Single-precision register. */
8622 gdb_assert (regnum < 32);
8624 /* s0 is always the least significant half of d0. */
8625 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8626 offset = (regnum & 1) ? 0 : 4;
8628 offset = (regnum & 1) ? 4 : 0;
8630 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8631 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8634 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8635 if (status == REG_VALID)
8636 memcpy (buf, reg_buf + offset, 4);
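/* Worked example of the mapping above, assuming a little-endian
   target: s5 (pseudo number 5 within the VFP block) maps to d2
   (5 >> 1) at byte offset 4 (5 is odd), i.e. the most significant
   half of d2, while s4 occupies bytes 0-3 of the same double
   register.  */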
8641 /* Store the contents of BUF to a NEON quad register, by writing to
8642 two double registers. This is used to implement the quad pseudo
8643 registers, and for argument passing in case the quad registers are
8644 missing; vectors are passed in quad registers when using the VFP
8645 ABI, even if a NEON unit is not present. REGNUM is the index
8646 of the quad register, in [0, 15]. */
8649 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8650 int regnum, const gdb_byte *buf)
8653 int offset, double_regnum;
8655 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8656 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8659 /* d0 is always the least significant half of q0. */
8660 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8665 regcache_raw_write (regcache, double_regnum, buf + offset);
8666 offset = 8 - offset;
8667 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8671 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8672 int regnum, const gdb_byte *buf)
8674 const int num_regs = gdbarch_num_regs (gdbarch);
8676 gdb_byte reg_buf[8];
8677 int offset, double_regnum;
8679 gdb_assert (regnum >= num_regs);
8682 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8683 /* Quad-precision register. */
8684 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8687 /* Single-precision register. */
8688 gdb_assert (regnum < 32);
8690 /* s0 is always the least significant half of d0. */
8691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8692 offset = (regnum & 1) ? 0 : 4;
8694 offset = (regnum & 1) ? 4 : 0;
8696 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8697 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8700 regcache_raw_read (regcache, double_regnum, reg_buf);
8701 memcpy (reg_buf + offset, buf, 4);
8702 regcache_raw_write (regcache, double_regnum, reg_buf);
8706 static struct value *
8707 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8709 const int *reg_p = (const int *) baton;
8710 return value_of_register (*reg_p, frame);
8713 static enum gdb_osabi
8714 arm_elf_osabi_sniffer (bfd *abfd)
8716 unsigned int elfosabi;
8717 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8719 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8721 if (elfosabi == ELFOSABI_ARM)
8722 /* GNU tools use this value. Check note sections in this case,
8724 bfd_map_over_sections (abfd,
8725 generic_elf_osabi_sniff_abi_tag_sections,
8728 /* Anything else will be handled by the generic ELF sniffer. */
8733 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8734 struct reggroup *group)
8736 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8737 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8738 all_reggroup, of course. */
8739 if (regnum == ARM_FPS_REGNUM)
8740 return (group == float_reggroup
8741 || group == save_reggroup
8742 || group == restore_reggroup
8743 || group == all_reggroup);
8745 return default_register_reggroup_p (gdbarch, regnum, group);
8749 /* For backward-compatibility we allow two 'g' packet lengths with
8750 the remote protocol depending on whether FPA registers are
8751 supplied. M-profile targets do not have FPA registers, but some
8752 stubs already exist in the wild which use a 'g' packet which
8753 supplies them albeit with dummy values. The packet format which
8754 includes FPA registers should be considered deprecated for
8755 M-profile targets. */
8758 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8760 if (gdbarch_tdep (gdbarch)->is_m)
8762 /* If we know from the executable this is an M-profile target,
8763 cater for remote targets whose register set layout is the
8764 same as the FPA layout. */
8765 register_remote_g_packet_guess (gdbarch,
8766 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8767 (16 * INT_REGISTER_SIZE)
8768 + (8 * FP_REGISTER_SIZE)
8769 + (2 * INT_REGISTER_SIZE),
8770 tdesc_arm_with_m_fpa_layout);
8772 /* The regular M-profile layout. */
8773 register_remote_g_packet_guess (gdbarch,
8774 /* r0-r12,sp,lr,pc; xpsr */
8775 (16 * INT_REGISTER_SIZE)
8776 + INT_REGISTER_SIZE,
8779 /* M-profile plus M4F VFP. */
8780 register_remote_g_packet_guess (gdbarch,
8781 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8782 (16 * INT_REGISTER_SIZE)
8783 + (16 * VFP_REGISTER_SIZE)
8784 + (2 * INT_REGISTER_SIZE),
8785 tdesc_arm_with_m_vfp_d16);
8788 /* Otherwise we don't have a useful guess. */
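/* For reference, the guessed 'g' packet sizes above work out as
   follows, assuming the usual INT_REGISTER_SIZE == 4,
   FP_REGISTER_SIZE == 12 and VFP_REGISTER_SIZE == 8:
   FPA-style layout: 16*4 + 8*12 + 2*4 = 168 bytes;
   plain M-profile: 16*4 + 4 = 68 bytes;
   M-profile plus M4F VFP: 16*4 + 16*8 + 2*4 = 200 bytes.  */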
8792 /* Initialize the current architecture based on INFO. If possible,
8793 re-use an architecture from ARCHES, which is a list of
8794 architectures already created during this debugging session.
8796 Called e.g. at program startup, when reading a core file, and when
8797 reading a binary file. */
8799 static struct gdbarch *
8800 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8802 struct gdbarch_tdep *tdep;
8803 struct gdbarch *gdbarch;
8804 struct gdbarch_list *best_arch;
8805 enum arm_abi_kind arm_abi = arm_abi_global;
8806 enum arm_float_model fp_model = arm_fp_model;
8807 struct tdesc_arch_data *tdesc_data = NULL;
8809 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8810 int have_wmmx_registers = 0;
8812 int have_fpa_registers = 1;
8813 const struct target_desc *tdesc = info.target_desc;
8815 /* If we have an object to base this architecture on, try to determine
8818 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8820 int ei_osabi, e_flags;
8822 switch (bfd_get_flavour (info.abfd))
8824 case bfd_target_aout_flavour:
8825 /* Assume it's an old APCS-style ABI. */
8826 arm_abi = ARM_ABI_APCS;
8829 case bfd_target_coff_flavour:
8830 /* Assume it's an old APCS-style ABI. */
8832 arm_abi = ARM_ABI_APCS;
8835 case bfd_target_elf_flavour:
8836 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8837 e_flags = elf_elfheader (info.abfd)->e_flags;
8839 if (ei_osabi == ELFOSABI_ARM)
8841 /* GNU tools used to use this value, but do not for EABI
8842 objects. There's nowhere to tag an EABI version
8843 anyway, so assume APCS. */
8844 arm_abi = ARM_ABI_APCS;
8846 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8848 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8849 int attr_arch, attr_profile;
8853 case EF_ARM_EABI_UNKNOWN:
8854 /* Assume GNU tools. */
8855 arm_abi = ARM_ABI_APCS;
8858 case EF_ARM_EABI_VER4:
8859 case EF_ARM_EABI_VER5:
8860 arm_abi = ARM_ABI_AAPCS;
8861 /* EABI binaries default to VFP float ordering.
8862 They may also contain build attributes that can
8863 be used to identify if the VFP argument-passing
8865 if (fp_model == ARM_FLOAT_AUTO)
8868 switch (bfd_elf_get_obj_attr_int (info.abfd,
8872 case AEABI_VFP_args_base:
8873 /* "The user intended FP parameter/result
8874 passing to conform to AAPCS, base
8876 fp_model = ARM_FLOAT_SOFT_VFP;
8878 case AEABI_VFP_args_vfp:
8879 /* "The user intended FP parameter/result
8880 passing to conform to AAPCS, VFP
8882 fp_model = ARM_FLOAT_VFP;
8884 case AEABI_VFP_args_toolchain:
8885 /* "The user intended FP parameter/result
8886 passing to conform to tool chain-specific
8887 conventions" - we don't know any such
8888 conventions, so leave it as "auto". */
8890 case AEABI_VFP_args_compatible:
8891 /* "Code is compatible with both the base
8892 and VFP variants; the user did not permit
8893 non-variadic functions to pass FP
8894 parameters/results" - leave it as
8898 /* Attribute value not mentioned in the
8899 November 2012 ABI, so leave it as
8904 fp_model = ARM_FLOAT_SOFT_VFP;
8910 /* Leave it as "auto". */
8911 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8916 /* Detect M-profile programs. This only works if the
8917 executable file includes build attributes; GCC does
8918 copy them to the executable, but e.g. RealView does not. */
8920 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8922 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8924 Tag_CPU_arch_profile);
8925 /* GCC specifies the profile for v6-M; RealView only
8926 specifies the profile for architectures starting with
8927 V7 (as opposed to architectures with a tag
8928 numerically greater than TAG_CPU_ARCH_V7). */
8929 if (!tdesc_has_registers (tdesc)
8930 && (attr_arch == TAG_CPU_ARCH_V6_M
8931 || attr_arch == TAG_CPU_ARCH_V6S_M
8932 || attr_profile == 'M'))
8937 if (fp_model == ARM_FLOAT_AUTO)
8939 int e_flags = elf_elfheader (info.abfd)->e_flags;
8941 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8944 /* Leave it as "auto". Strictly speaking this case
8945 means FPA, but almost nobody uses that now, and
8946 many toolchains fail to set the appropriate bits
8947 for the floating-point model they use. */
8949 case EF_ARM_SOFT_FLOAT:
8950 fp_model = ARM_FLOAT_SOFT_FPA;
8952 case EF_ARM_VFP_FLOAT:
8953 fp_model = ARM_FLOAT_VFP;
8955 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8956 fp_model = ARM_FLOAT_SOFT_VFP;
8961 if (e_flags & EF_ARM_BE8)
8962 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
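/* BE8 images are byte-invariant big-endian: data is big-endian but
   the instruction stream stays little-endian, which is why only
   byte_order_for_code is overridden here.  */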
8967 /* Leave it as "auto". */
8972 /* Check any target description for validity. */
8973 if (tdesc_has_registers (tdesc))
8975 /* For most registers we require GDB's default names; but also allow
8976 the numeric names for sp / lr / pc, as a convenience. */
8977 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8978 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8979 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8981 const struct tdesc_feature *feature;
8984 feature = tdesc_find_feature (tdesc,
8985 "org.gnu.gdb.arm.core");
8986 if (feature == NULL)
8988 feature = tdesc_find_feature (tdesc,
8989 "org.gnu.gdb.arm.m-profile");
8990 if (feature == NULL)
8996 tdesc_data = tdesc_data_alloc ();
8999 for (i = 0; i < ARM_SP_REGNUM; i++)
9000 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9001 arm_register_names[i]);
9002 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9005 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9008 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9012 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9013 ARM_PS_REGNUM, "xpsr");
9015 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9016 ARM_PS_REGNUM, "cpsr");
9020 tdesc_data_cleanup (tdesc_data);
9024 feature = tdesc_find_feature (tdesc,
9025 "org.gnu.gdb.arm.fpa");
9026 if (feature != NULL)
9029 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9030 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9031 arm_register_names[i]);
9034 tdesc_data_cleanup (tdesc_data);
9039 have_fpa_registers = 0;
9041 feature = tdesc_find_feature (tdesc,
9042 "org.gnu.gdb.xscale.iwmmxt");
9043 if (feature != NULL)
9045 static const char *const iwmmxt_names[] = {
9046 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9047 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9048 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9049 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9053 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9055 &= tdesc_numbered_register (feature, tdesc_data, i,
9056 iwmmxt_names[i - ARM_WR0_REGNUM]);
9058 /* Check for the control registers, but do not fail if they
9060 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9061 tdesc_numbered_register (feature, tdesc_data, i,
9062 iwmmxt_names[i - ARM_WR0_REGNUM]);
9064 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9066 &= tdesc_numbered_register (feature, tdesc_data, i,
9067 iwmmxt_names[i - ARM_WR0_REGNUM]);
9071 tdesc_data_cleanup (tdesc_data);
9075 have_wmmx_registers = 1;
9078 /* If we have a VFP unit, check whether the single precision registers
9079 are present. If not, then we will synthesize them as pseudo
9081 feature = tdesc_find_feature (tdesc,
9082 "org.gnu.gdb.arm.vfp");
9083 if (feature != NULL)
9085 static const char *const vfp_double_names[] = {
9086 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9087 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9088 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9089 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9092 /* Require the double precision registers. There must be either
9095 for (i = 0; i < 32; i++)
9097 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9099 vfp_double_names[i]);
9103 if (!valid_p && i == 16)
9106 /* Also require FPSCR. */
9107 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9108 ARM_FPSCR_REGNUM, "fpscr");
9111 tdesc_data_cleanup (tdesc_data);
9115 if (tdesc_unnumbered_register (feature, "s0") == 0)
9116 have_vfp_pseudos = 1;
9118 vfp_register_count = i;
9120 /* If we have VFP, also check for NEON. The architecture allows
9121 NEON without VFP (integer vector operations only), but GDB
9122 does not support that. */
9123 feature = tdesc_find_feature (tdesc,
9124 "org.gnu.gdb.arm.neon");
9125 if (feature != NULL)
9127 /* NEON requires 32 double-precision registers. */
9130 tdesc_data_cleanup (tdesc_data);
9134 /* If there are quad registers defined by the stub, use
9135 their type; otherwise (normally) provide them with
9136 the default type. */
9137 if (tdesc_unnumbered_register (feature, "q0") == 0)
9138 have_neon_pseudos = 1;
9145 /* If there is already a candidate, use it. */
9146 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9148 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9150 if (arm_abi != ARM_ABI_AUTO
9151 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9154 if (fp_model != ARM_FLOAT_AUTO
9155 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9158 /* There are various other properties in tdep that we do not
9159 need to check here: those derived from a target description,
9160 since gdbarches with a different target description are
9161 automatically disqualified. */
9163 /* Do check is_m, though, since it might come from the binary. */
9164 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9167 /* Found a match. */
9171 if (best_arch != NULL)
9173 if (tdesc_data != NULL)
9174 tdesc_data_cleanup (tdesc_data);
9175 return best_arch->gdbarch;
9178 tdep = XCNEW (struct gdbarch_tdep);
9179 gdbarch = gdbarch_alloc (&info, tdep);
9181 /* Record additional information about the architecture we are defining.
9182 These are gdbarch discriminators, like the OSABI. */
9183 tdep->arm_abi = arm_abi;
9184 tdep->fp_model = fp_model;
9186 tdep->have_fpa_registers = have_fpa_registers;
9187 tdep->have_wmmx_registers = have_wmmx_registers;
9188 gdb_assert (vfp_register_count == 0
9189 || vfp_register_count == 16
9190 || vfp_register_count == 32);
9191 tdep->vfp_register_count = vfp_register_count;
9192 tdep->have_vfp_pseudos = have_vfp_pseudos;
9193 tdep->have_neon_pseudos = have_neon_pseudos;
9194 tdep->have_neon = have_neon;
9196 arm_register_g_packet_guesses (gdbarch);
9199 switch (info.byte_order_for_code)
9201 case BFD_ENDIAN_BIG:
9202 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9203 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9204 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9205 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9209 case BFD_ENDIAN_LITTLE:
9210 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9211 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9212 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9213 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9218 internal_error (__FILE__, __LINE__,
9219 _("arm_gdbarch_init: bad byte order for float format"));
9222 /* On ARM targets char defaults to unsigned. */
9223 set_gdbarch_char_signed (gdbarch, 0);
9225 /* Note: for displaced stepping, this includes the breakpoint, and one word
9226 of additional scratch space. This setting isn't used for anything besides
9227 displaced stepping at present. */
9228 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9230 /* This should be low enough for everything. */
9231 tdep->lowest_pc = 0x20;
9232 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9234 /* The default, for both APCS and AAPCS, is to return small
9235 structures in registers. */
9236 tdep->struct_return = reg_struct_return;
9238 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9239 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9241 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9243 /* Frame handling. */
9244 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9245 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9246 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9248 frame_base_set_default (gdbarch, &arm_normal_base);
9250 /* Address manipulation. */
9251 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9253 /* Advance PC across function entry code. */
9254 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9256 /* Detect whether PC is at a point where the stack has been destroyed. */
9257 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9259 /* Skip trampolines. */
9260 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9262 /* The stack grows downward. */
9263 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9265 /* Breakpoint manipulation. */
9266 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9267 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9268 arm_remote_breakpoint_from_pc);
9270 /* Information about registers, etc. */
9271 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9272 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9273 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9274 set_gdbarch_register_type (gdbarch, arm_register_type);
9275 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9277 /* This "info float" is FPA-specific. Use the generic version if we
9279 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9280 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9282 /* Internal <-> external register number maps. */
9283 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9284 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9286 set_gdbarch_register_name (gdbarch, arm_register_name);
9288 /* Returning results. */
9289 set_gdbarch_return_value (gdbarch, arm_return_value);
9292 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9294 /* Minsymbol frobbing. */
9295 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9296 set_gdbarch_coff_make_msymbol_special (gdbarch,
9297 arm_coff_make_msymbol_special);
9298 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9300 /* Thumb-2 IT block support. */
9301 set_gdbarch_adjust_breakpoint_address (gdbarch,
9302 arm_adjust_breakpoint_address);
9304 /* Virtual tables. */
9305 set_gdbarch_vbit_in_delta (gdbarch, 1);
9307 /* Hook in the ABI-specific overrides, if they have been registered. */
9308 gdbarch_init_osabi (info, gdbarch);
9310 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9312 /* Add some default predicates. */
9314 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9315 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9316 dwarf2_append_unwinders (gdbarch);
9317 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9318 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9320 /* Now we have tuned the configuration, set a few final things,
9321 based on what the OS ABI has told us. */
9323 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9324 binaries are always marked. */
9325 if (tdep->arm_abi == ARM_ABI_AUTO)
9326 tdep->arm_abi = ARM_ABI_APCS;
9328 /* Watchpoints are not steppable. */
9329 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9331 /* We used to default to FPA for generic ARM, but almost nobody
9332 uses that now, and we now provide a way for the user to force
9333 the model. So default to the most useful variant. */
9334 if (tdep->fp_model == ARM_FLOAT_AUTO)
9335 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9337 if (tdep->jb_pc >= 0)
9338 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9340 /* Floating point sizes and format. */
9341 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9342 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9344 set_gdbarch_double_format
9345 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9346 set_gdbarch_long_double_format
9347 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9351 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9352 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
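/* A note on the "littlebyte_bigword" formats selected above for FPA
   and soft-FPA: each 32-bit word of a double is stored little-endian,
   but the most significant word comes first.  For example the double
   1.0 (0x3FF0000000000000) is laid out in memory as
   00 00 F0 3F 00 00 00 00.  */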
9355 if (have_vfp_pseudos)
9357 /* NOTE: These are the only pseudo registers used by
9358 the ARM target at the moment. If more are added, a
9359 little more care in numbering will be needed. */
9361 int num_pseudos = 32;
9362 if (have_neon_pseudos)
9364 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9365 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9366 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9371 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9373 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9375 /* Override tdesc_register_type to adjust the types of VFP
9376 registers for NEON. */
9377 set_gdbarch_register_type (gdbarch, arm_register_type);
9380 /* Add standard register aliases. We add aliases even for those
9381 names which are used by the current architecture - it's simpler,
9382 and does no harm, since nothing ever lists user registers. */
9383 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9384 user_reg_add (gdbarch, arm_register_aliases[i].name,
9385 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9391 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9393 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9398 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9399 (unsigned long) tdep->lowest_pc);
9402 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9405 _initialize_arm_tdep (void)
9407 struct ui_file *stb;
9409 struct cmd_list_element *new_set, *new_show;
9410 const char *setname;
9411 const char *setdesc;
9412 const char *const *regnames;
9414 static char *helptext;
9415 char regdesc[1024], *rdptr = regdesc;
9416 size_t rest = sizeof (regdesc);
9418 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9420 arm_objfile_data_key
9421 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9423 /* Add ourselves to objfile event chain. */
9424 observer_attach_new_objfile (arm_exidx_new_objfile);
9426 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9428 /* Register an ELF OS ABI sniffer for ARM binaries. */
9429 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9430 bfd_target_elf_flavour,
9431 arm_elf_osabi_sniffer);
9433 /* Initialize the standard target descriptions. */
9434 initialize_tdesc_arm_with_m ();
9435 initialize_tdesc_arm_with_m_fpa_layout ();
9436 initialize_tdesc_arm_with_m_vfp_d16 ();
9437 initialize_tdesc_arm_with_iwmmxt ();
9438 initialize_tdesc_arm_with_vfpv2 ();
9439 initialize_tdesc_arm_with_vfpv3 ();
9440 initialize_tdesc_arm_with_neon ();
9442 /* Get the number of possible sets of register names defined in opcodes. */
9443 num_disassembly_options = get_arm_regname_num_options ();
9445 /* Add root prefix command for all "set arm"/"show arm" commands. */
9446 add_prefix_cmd ("arm", no_class, set_arm_command,
9447 _("Various ARM-specific commands."),
9448 &setarmcmdlist, "set arm ", 0, &setlist);
9450 add_prefix_cmd ("arm", no_class, show_arm_command,
9451 _("Various ARM-specific commands."),
9452 &showarmcmdlist, "show arm ", 0, &showlist);
9454 /* Sync the opcode insn printer with our register viewer. */
9455 parse_arm_disassembler_option ("reg-names-std");
9457 /* Initialize the array that will be passed to
9458 add_setshow_enum_cmd(). */
9459 valid_disassembly_styles = XNEWVEC (const char *,
9460 num_disassembly_options + 1);
9461 for (i = 0; i < num_disassembly_options; i++)
9463 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
9464 valid_disassembly_styles[i] = setname;
9465 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9468 /* When we find the default names, tell the disassembler to use
9470 if (!strcmp (setname, "std"))
9472 disassembly_style = setname;
9473 set_arm_regname_option (i);
9476 /* Mark the end of valid options. */
9477 valid_disassembly_styles[num_disassembly_options] = NULL;
9479 /* Create the help text. */
9480 stb = mem_fileopen ();
9481 fprintf_unfiltered (stb, "%s%s%s",
9482 _("The valid values are:\n"),
9484 _("The default is \"std\"."));
9485 helptext = ui_file_xstrdup (stb, NULL);
9486 ui_file_delete (stb);
9488 add_setshow_enum_cmd("disassembler", no_class,
9489 valid_disassembly_styles, &disassembly_style,
9490 _("Set the disassembly style."),
9491 _("Show the disassembly style."),
9493 set_disassembly_style_sfunc,
9494 NULL, /* FIXME: i18n: The disassembly style is
9496 &setarmcmdlist, &showarmcmdlist);
9498 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9499 _("Set usage of ARM 32-bit mode."),
9500 _("Show usage of ARM 32-bit mode."),
9501 _("When off, a 26-bit PC will be used."),
9503 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9505 &setarmcmdlist, &showarmcmdlist);
9507 /* Add a command to allow the user to force the FPU model. */
9508 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9509 _("Set the floating point type."),
9510 _("Show the floating point type."),
9511 _("auto - Determine the FP typefrom the OS-ABI.\n\
9512 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9513 fpa - FPA co-processor (GCC compiled).\n\
9514 softvfp - Software FP with pure-endian doubles.\n\
9515 vfp - VFP co-processor."),
9516 set_fp_model_sfunc, show_fp_model,
9517 &setarmcmdlist, &showarmcmdlist);
9519 /* Add a command to allow the user to force the ABI. */
9520 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9523 NULL, arm_set_abi, arm_show_abi,
9524 &setarmcmdlist, &showarmcmdlist);
9526 /* Add two commands to allow the user to force the assumed
9528 add_setshow_enum_cmd ("fallback-mode", class_support,
9529 arm_mode_strings, &arm_fallback_mode_string,
9530 _("Set the mode assumed when symbols are unavailable."),
9531 _("Show the mode assumed when symbols are unavailable."),
9532 NULL, NULL, arm_show_fallback_mode,
9533 &setarmcmdlist, &showarmcmdlist);
9534 add_setshow_enum_cmd ("force-mode", class_support,
9535 arm_mode_strings, &arm_force_mode_string,
9536 _("Set the mode assumed even when symbols are available."),
9537 _("Show the mode assumed even when symbols are available."),
9538 NULL, NULL, arm_show_force_mode,
9539 &setarmcmdlist, &showarmcmdlist);
9541 /* Debugging flag. */
9542 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9543 _("Set ARM debugging."),
9544 _("Show ARM debugging."),
9545 _("When on, arm-specific debugging is enabled."),
9547 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9548 &setdebuglist, &showdebuglist);
9551 /* ARM-reversible process record data structures. */
9553 #define ARM_INSN_SIZE_BYTES 4
9554 #define THUMB_INSN_SIZE_BYTES 2
9555 #define THUMB2_INSN_SIZE_BYTES 4
9558 /* Position of the bit within a 32-bit ARM instruction
9559 that defines whether the instruction is a load or store. */
9560 #define INSN_S_L_BIT_NUM 20
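/* For example, in the ARM single data transfer encodings, bit 20 set
   means a load (LDR/LDRB) and bit 20 clear means a store (STR/STRB);
   the decode routines below test this bit to tell the two apart.  */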
9562 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9565 unsigned int reg_len = LENGTH; \
9568 REGS = XNEWVEC (uint32_t, reg_len); \
9569 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9574 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9577 unsigned int mem_len = LENGTH; \
9580 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9581 memcpy(&MEMS->len, &RECORD_BUF[0], \
9582 sizeof(struct arm_mem_r) * LENGTH); \
9587 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9588 #define INSN_RECORDED(ARM_RECORD) \
9589 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9591 /* ARM memory record structure. */
9594 uint32_t len; /* Record length. */
9595 uint32_t addr; /* Memory address. */
9598 /* An ARM instruction record contains the opcode and execution state
9599 of the current insn (before entry to decode_insn()), together with the
9600 list of to-be-modified registers and memory blocks (on return from
9601 decode_insn()). */
9603 typedef struct insn_decode_record_t
9605 struct gdbarch *gdbarch;
9606 struct regcache *regcache;
9607 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9608 uint32_t arm_insn; /* Should accommodate thumb. */
9609 uint32_t cond; /* Condition code. */
9610 uint32_t opcode; /* Insn opcode. */
9611 uint32_t decode; /* Insn decode bits. */
9612 uint32_t mem_rec_count; /* No of mem records. */
9613 uint32_t reg_rec_count; /* No of reg records. */
9614 uint32_t *arm_regs; /* Registers to be saved for this record. */
9615 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9616 } insn_decode_record;
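/* Rough usage sketch (an illustration, not lifted verbatim from any one
   routine below): a decode routine notes the registers and memory that
   the insn will modify in local scratch buffers, sets the counts, and
   then lets REG_ALLOC / MEM_ALLOC copy them into the record:

     uint32_t record_buf[8], record_buf_mem[8];
     record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);   <- Rd
     record_buf[1] = ARM_PS_REGNUM;                         <- flags
     arm_insn_r->reg_rec_count = 2;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);  */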
9619 /* Checks ARM SBZ and SBO mandatory fields. */
9622 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9624 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9643 enum arm_record_result
9645 ARM_RECORD_SUCCESS = 0,
9646 ARM_RECORD_FAILURE = 1
9653 } arm_record_strx_t;
9664 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9665 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9668 struct regcache *reg_cache = arm_insn_r->regcache;
9669 ULONGEST u_regval[2]= {0};
9671 uint32_t reg_src1 = 0, reg_src2 = 0;
9672 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9673 uint32_t opcode1 = 0;
9675 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9676 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9677 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
9680 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9682 /* 1) Handle misc store, immediate offset. */
9683 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9684 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9685 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9686 regcache_raw_read_unsigned (reg_cache, reg_src1,
9688 if (ARM_PC_REGNUM == reg_src1)
9690 /* If R15 was used as Rn, the value is the current PC+8. */
9691 u_regval[0] = u_regval[0] + 8;
9693 offset_8 = (immed_high << 4) | immed_low;
9694 /* Calculate target store address. */
9695 if (14 == arm_insn_r->opcode)
9697 tgt_mem_addr = u_regval[0] + offset_8;
9701 tgt_mem_addr = u_regval[0] - offset_8;
9703 if (ARM_RECORD_STRH == str_type)
9705 record_buf_mem[0] = 2;
9706 record_buf_mem[1] = tgt_mem_addr;
9707 arm_insn_r->mem_rec_count = 1;
9709 else if (ARM_RECORD_STRD == str_type)
9711 record_buf_mem[0] = 4;
9712 record_buf_mem[1] = tgt_mem_addr;
9713 record_buf_mem[2] = 4;
9714 record_buf_mem[3] = tgt_mem_addr + 4;
9715 arm_insn_r->mem_rec_count = 2;
9718 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9720 /* 2) Store, register offset. */
9722 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9724 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9725 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9726 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9729 /* If R15 was used as Rn, the value is the current PC+8. */
9730 u_regval[0] = u_regval[0] + 8;
9732 /* Calculate target store address, Rn +/- Rm, register offset. */
9733 if (12 == arm_insn_r->opcode)
9735 tgt_mem_addr = u_regval[0] + u_regval[1];
9739 tgt_mem_addr = u_regval[1] - u_regval[0];
9741 if (ARM_RECORD_STRH == str_type)
9743 record_buf_mem[0] = 2;
9744 record_buf_mem[1] = tgt_mem_addr;
9745 arm_insn_r->mem_rec_count = 1;
9747 else if (ARM_RECORD_STRD == str_type)
9749 record_buf_mem[0] = 4;
9750 record_buf_mem[1] = tgt_mem_addr;
9751 record_buf_mem[2] = 4;
9752 record_buf_mem[3] = tgt_mem_addr + 4;
9753 arm_insn_r->mem_rec_count = 2;
9756 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9757 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9759 /* 3) Store, immediate pre-indexed. */
9760 /* 5) Store, immediate post-indexed. */
9761 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9762 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9763 offset_8 = (immed_high << 4) | immed_low;
9764 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9765 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9766 /* Calculate target store address, Rn +/- Rm, register offset. */
9767 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9769 tgt_mem_addr = u_regval[0] + offset_8;
9773 tgt_mem_addr = u_regval[0] - offset_8;
9775 if (ARM_RECORD_STRH == str_type)
9777 record_buf_mem[0] = 2;
9778 record_buf_mem[1] = tgt_mem_addr;
9779 arm_insn_r->mem_rec_count = 1;
9781 else if (ARM_RECORD_STRD == str_type)
9783 record_buf_mem[0] = 4;
9784 record_buf_mem[1] = tgt_mem_addr;
9785 record_buf_mem[2] = 4;
9786 record_buf_mem[3] = tgt_mem_addr + 4;
9787 arm_insn_r->mem_rec_count = 2;
9789 /* Record Rn also as it changes. */
9790 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9791 arm_insn_r->reg_rec_count = 1;
9793 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9794 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9796 /* 4) Store, register pre-indexed. */
9797 /* 6) Store, register post -indexed. */
9798 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9799 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9800 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9801 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9802 /* Calculate target store address, Rn +/- Rm, register offset. */
9803 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9805 tgt_mem_addr = u_regval[0] + u_regval[1];
9809 tgt_mem_addr = u_regval[1] - u_regval[0];
9811 if (ARM_RECORD_STRH == str_type)
9813 record_buf_mem[0] = 2;
9814 record_buf_mem[1] = tgt_mem_addr;
9815 arm_insn_r->mem_rec_count = 1;
9817 else if (ARM_RECORD_STRD == str_type)
9819 record_buf_mem[0] = 4;
9820 record_buf_mem[1] = tgt_mem_addr;
9821 record_buf_mem[2] = 4;
9822 record_buf_mem[3] = tgt_mem_addr + 4;
9823 arm_insn_r->mem_rec_count = 2;
9825 /* Record Rn also as it changes. */
9826 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9827 arm_insn_r->reg_rec_count = 1;
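/* Worked example of the recording above: for "strh r1, [r2, #6]" with
   r2 = 0x1000, the immediate-offset case records one 2-byte memory
   block at 0x1006 (record_buf_mem = { 2, 0x1006 }) and no registers;
   the pre- and post-indexed forms additionally record Rn, since the
   base register is written back.  */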
9832 /* Handling ARM extension space insns. */
9835 arm_record_extension_space (insn_decode_record *arm_insn_r)
9837 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9838 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9839 uint32_t record_buf[8], record_buf_mem[8];
9840 uint32_t reg_src1 = 0;
9841 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9842 struct regcache *reg_cache = arm_insn_r->regcache;
9843 ULONGEST u_regval = 0;
9845 gdb_assert (!INSN_RECORDED(arm_insn_r));
9846 /* Handle unconditional insn extension space. */
9848 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9849 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9850 if (arm_insn_r->cond)
9852 /* PLD has no effect on architectural state; it just affects
9854 if (5 == ((opcode1 & 0xE0) >> 5))
9857 record_buf[0] = ARM_PS_REGNUM;
9858 record_buf[1] = ARM_LR_REGNUM;
9859 arm_insn_r->reg_rec_count = 2;
9861 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9865 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9866 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9869 /* Undefined instruction on ARM V5; need to handle if later
9870 versions define it. */
9873 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9874 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9875 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9877 /* Handle arithmetic insn extension space. */
9878 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9879 && !INSN_RECORDED(arm_insn_r))
9881 /* Handle MLA(S) and MUL(S). */
9882 if (0 <= insn_op1 && 3 >= insn_op1)
9884 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9885 record_buf[1] = ARM_PS_REGNUM;
9886 arm_insn_r->reg_rec_count = 2;
9888 else if (4 <= insn_op1 && 15 >= insn_op1)
9890 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9891 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9892 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9893 record_buf[2] = ARM_PS_REGNUM;
9894 arm_insn_r->reg_rec_count = 3;
9898 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9899 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9900 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9902 /* Handle control insn extension space. */
9904 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9905 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9907 if (!bit (arm_insn_r->arm_insn,25))
9909 if (!bits (arm_insn_r->arm_insn, 4, 7))
9911 if ((0 == insn_op1) || (2 == insn_op1))
9914 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9915 arm_insn_r->reg_rec_count = 1;
9917 else if (1 == insn_op1)
9919 /* CPSR is going to be changed. */
9920 record_buf[0] = ARM_PS_REGNUM;
9921 arm_insn_r->reg_rec_count = 1;
9923 else if (3 == insn_op1)
9925 /* SPSR is going to be changed. */
9926 /* We need to get SPSR value, which is yet to be done. */
9927 printf_unfiltered (_("Process record does not support "
9928 "instruction 0x%0x at address %s.\n"),
9929 arm_insn_r->arm_insn,
9930 paddress (arm_insn_r->gdbarch,
9931 arm_insn_r->this_addr));
9935 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9940 record_buf[0] = ARM_PS_REGNUM;
9941 arm_insn_r->reg_rec_count = 1;
9943 else if (3 == insn_op1)
9946 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9947 arm_insn_r->reg_rec_count = 1;
9950 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9953 record_buf[0] = ARM_PS_REGNUM;
9954 record_buf[1] = ARM_LR_REGNUM;
9955 arm_insn_r->reg_rec_count = 2;
9957 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9959 /* QADD, QSUB, QDADD, QDSUB */
9960 record_buf[0] = ARM_PS_REGNUM;
9961 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9962 arm_insn_r->reg_rec_count = 2;
9964 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
9967 record_buf[0] = ARM_PS_REGNUM;
9968 record_buf[1] = ARM_LR_REGNUM;
9969 arm_insn_r->reg_rec_count = 2;
9971 /* Save SPSR also; how? */
9972 printf_unfiltered (_("Process record does not support "
9973 "instruction 0x%0x at address %s.\n"),
9974 arm_insn_r->arm_insn,
9975 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
9978 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
9979 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
9980 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
9981 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
9984 if (0 == insn_op1 || 1 == insn_op1)
9986 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
9987 /* We don't do optimization for SMULW<y> where we
9989 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9990 record_buf[1] = ARM_PS_REGNUM;
9991 arm_insn_r->reg_rec_count = 2;
9993 else if (2 == insn_op1)
9996 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9997 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
9998 arm_insn_r->reg_rec_count = 2;
10000 else if (3 == insn_op1)
10003 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10004 arm_insn_r->reg_rec_count = 1;
10010 /* MSR : immediate form. */
10013 /* CPSR is going to be changed. */
10014 record_buf[0] = ARM_PS_REGNUM;
10015 arm_insn_r->reg_rec_count = 1;
10017 else if (3 == insn_op1)
10019 /* SPSR is going to be changed. */
10020 /* We need to get the SPSR value, which is yet to be done. */
10021 printf_unfiltered (_("Process record does not support "
10022 "instruction 0x%0x at address %s.\n"),
10023 arm_insn_r->arm_insn,
10024 paddress (arm_insn_r->gdbarch,
10025 arm_insn_r->this_addr));
10031 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10032 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10033 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10035 /* Handle load/store insn extension space. */
10037 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10038 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10039 && !INSN_RECORDED(arm_insn_r))
10044 /* These insns change a register and memory as well. */
10045 /* SWP or SWPB insn. */
10046 /* Get memory address given by Rn. */
10047 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10048 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10049 /* SWP insn swaps a word. */
10050 if (8 == arm_insn_r->opcode)
10052 record_buf_mem[0] = 4;
10056 /* SWPB insn swaps only a byte. */
10057 record_buf_mem[0] = 1;
10059 record_buf_mem[1] = u_regval;
10060 arm_insn_r->mem_rec_count = 1;
10061 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10062 arm_insn_r->reg_rec_count = 1;
10064 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10067 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10070 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10073 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10074 record_buf[1] = record_buf[0] + 1;
10075 arm_insn_r->reg_rec_count = 2;
10077 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10080 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10083 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10085 /* LDRH, LDRSB, LDRSH. */
10086 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10087 arm_insn_r->reg_rec_count = 1;
10092 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10093 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10094 && !INSN_RECORDED(arm_insn_r))
10097 /* Handle coprocessor insn extension space. */
10100 /* To be done for ARMv5 and later; as of now we return -1. */
10102 printf_unfiltered (_("Process record does not support instruction 0x%0x "
10103 "at address %s.\n"), arm_insn_r->arm_insn,
10104 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10107 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10108 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10113 /* Handling opcode 000 insns. */
10116 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10118 struct regcache *reg_cache = arm_insn_r->regcache;
10119 uint32_t record_buf[8], record_buf_mem[8];
10120 ULONGEST u_regval[2] = {0};
10122 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10123 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10124 uint32_t opcode1 = 0;
10126 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10127 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10128 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10130 /* Data processing insn /multiply insn. */
10131 if (9 == arm_insn_r->decode
10132 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10133 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10135 /* Handle multiply instructions. */
10136 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10137 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10139 /* Handle MLA and MUL. */
10140 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10141 record_buf[1] = ARM_PS_REGNUM;
10142 arm_insn_r->reg_rec_count = 2;
10144 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10146 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10147 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10148 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10149 record_buf[2] = ARM_PS_REGNUM;
10150 arm_insn_r->reg_rec_count = 3;
10153 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10154 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10156 /* Handle misc load insns, as the 20th bit (L = 1) is set. */
10157 /* The LDR insn is capable of branching: if
10158 MOV LR, PC is preceded by an LDR insn having Rn as R15,
10159 it emulates a branch and link insn, and hence we
10160 need to save CPSR and PC as well. I am not sure this is the right
10161 place; an opcode = 010 LDR insn makes this happen if R15 was
10163 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10164 if (15 != reg_dest)
10166 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10167 arm_insn_r->reg_rec_count = 1;
10171 record_buf[0] = reg_dest;
10172 record_buf[1] = ARM_PS_REGNUM;
10173 arm_insn_r->reg_rec_count = 2;
10176 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10177 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10178 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10179 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10181 /* Handle MSR insn. */
10182 if (9 == arm_insn_r->opcode)
10184 /* CPSR is going to be changed. */
10185 record_buf[0] = ARM_PS_REGNUM;
10186 arm_insn_r->reg_rec_count = 1;
10190 /* SPSR is going to be changed. */
10191 /* How to read SPSR value? */
10192 printf_unfiltered (_("Process record does not support instruction "
10193 "0x%0x at address %s.\n"),
10194 arm_insn_r->arm_insn,
10195 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10199 else if (9 == arm_insn_r->decode
10200 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10201 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10203 /* Handling SWP, SWPB. */
10204 /* These insns change a register and memory as well. */
10205 /* SWP or SWPB insn. */
10207 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10208 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10209 /* SWP insn swaps a word. */
10210 if (8 == arm_insn_r->opcode)
10212 record_buf_mem[0] = 4;
10216 /* SWPB insn swaps only a byte. */
10217 record_buf_mem[0] = 1;
10219 record_buf_mem[1] = u_regval[0];
10220 arm_insn_r->mem_rec_count = 1;
10221 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10222 arm_insn_r->reg_rec_count = 1;
10224 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10225 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10227 /* Handle BLX, branch and link/exchange. */
10228 if (9 == arm_insn_r->opcode)
10230 /* Branch is chosen by setting the T bit of CPSR, bit[0] of Rm,
10231 and R14 stores the return address. */
10232 record_buf[0] = ARM_PS_REGNUM;
10233 record_buf[1] = ARM_LR_REGNUM;
10234 arm_insn_r->reg_rec_count = 2;
10237 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10239 /* Handle enhanced software breakpoint insn, BKPT. */
10240 /* CPSR is changed to be executed in ARM state, disabling normal
10241 interrupts, entering abort mode. */
10242 /* According to high vector configuration PC is set. */
10243 /* If the user hits a breakpoint and then types reverse, we
10244 need to go back with the previous CPSR and
10245 Program Counter. */
10246 record_buf[0] = ARM_PS_REGNUM;
10247 record_buf[1] = ARM_LR_REGNUM;
10248 arm_insn_r->reg_rec_count = 2;
10250 /* Save SPSR also; how? */
10251 printf_unfiltered (_("Process record does not support instruction "
10252 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10253 paddress (arm_insn_r->gdbarch,
10254 arm_insn_r->this_addr));
10257 else if (11 == arm_insn_r->decode
10258 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10260 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10262 /* Handle str(x) insn */
10263 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10266 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10267 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10269 /* Handle BX, branch and link/exchange. */
10270 /* Branch is chosen by setting the T bit of CPSR, bit[0] of Rm. */
10271 record_buf[0] = ARM_PS_REGNUM;
10272 arm_insn_r->reg_rec_count = 1;
10274 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10275 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10276 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10278 /* Count leading zeros: CLZ. */
10279 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10280 arm_insn_r->reg_rec_count = 1;
10282 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10283 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10284 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10285 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10288 /* Handle MRS insn. */
10289 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10290 arm_insn_r->reg_rec_count = 1;
10292 else if (arm_insn_r->opcode <= 15)
10294 /* Normal data processing insns. */
10295 /* In all 11 shifter-operand modes the insn modifies the destination
10296 register, which is specified by bits 12-15 of the encoding. */
10297 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10298 record_buf[1] = ARM_PS_REGNUM;
10299 arm_insn_r->reg_rec_count = 2;
10306 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10307 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10311 /* Handling opcode 001 insns. */
10314 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10316 uint32_t record_buf[8], record_buf_mem[8];
10318 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10319 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10321 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10322 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10323 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10326 /* Handle MSR insn. */
10327 if (9 == arm_insn_r->opcode)
10329 /* CPSR is going to be changed. */
10330 record_buf[0] = ARM_PS_REGNUM;
10331 arm_insn_r->reg_rec_count = 1;
10335 /* SPSR is going to be changed. */
10338 else if (arm_insn_r->opcode <= 15)
10340 /* Normal data processing insns. */
10341 /* In all 11 shifter-operand modes the insn modifies the destination
10342 register, which is specified by bits 12-15 of the encoding. */
10343 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10344 record_buf[1] = ARM_PS_REGNUM;
10345 arm_insn_r->reg_rec_count = 2;
10352 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10353 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10357 /* Handle ARM mode instructions with opcode 010. */
10360 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10362 struct regcache *reg_cache = arm_insn_r->regcache;
10364 uint32_t reg_base , reg_dest;
10365 uint32_t offset_12, tgt_mem_addr;
10366 uint32_t record_buf[8], record_buf_mem[8];
10367 unsigned char wback;
10370 /* Calculate wback. */
10371 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10372 || (bit (arm_insn_r->arm_insn, 21) == 1);
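/* That is, writeback happens either for post-indexed addressing
   (P bit 24 clear) or when the W bit 21 is set: e.g. "ldr r0, [r1], #4"
   and "ldr r0, [r1, #4]!" both update r1, while "ldr r0, [r1, #4]"
   does not.  */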
10374 arm_insn_r->reg_rec_count = 0;
10375 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10377 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10379 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10382 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10383 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10385 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10386 precedes an LDR instruction having R15 as reg_base, it
10387 emulates a branch and link instruction, and hence we need to save
10388 CPSR and PC as well. */
10389 if (ARM_PC_REGNUM == reg_dest)
10390 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10392 /* If wback is true, also save the base register, which is going to be
10395 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10399 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10401 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10402 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10404 /* Handle bit U. */
10405 if (bit (arm_insn_r->arm_insn, 23))
10407 /* U == 1: Add the offset. */
10408 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10412 /* U == 0: subtract the offset. */
10413 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10416 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10418 if (bit (arm_insn_r->arm_insn, 22))
10420 /* STRB and STRBT: 1 byte. */
10421 record_buf_mem[0] = 1;
10425 /* STR and STRT: 4 bytes. */
10426 record_buf_mem[0] = 4;
10429 /* Handle bit P. */
10430 if (bit (arm_insn_r->arm_insn, 24))
10431 record_buf_mem[1] = tgt_mem_addr;
10433 record_buf_mem[1] = (uint32_t) u_regval;
10435 arm_insn_r->mem_rec_count = 1;
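/* For example, "str r2, [r3, #-8]" with r3 holding 0x1000 records a
4-byte block at 0xff8, while the post-indexed "str r2, [r3], #-8"
records the block at 0x1000, the unmodified base address. */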
10437 /* If wback is true, also save the base register, which is going to be
10440 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10443 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10444 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10448 /* Handling opcode 011 insns. */
10451 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10453 struct regcache *reg_cache = arm_insn_r->regcache;
10455 uint32_t shift_imm = 0;
10456 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10457 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10458 uint32_t record_buf[8], record_buf_mem[8];
10461 ULONGEST u_regval[2];
10463 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10464 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10466 /* Handle enhanced store insns and LDRD DSP insn,
10467 order begins according to addressing modes for store insns
10471 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10473 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10474 /* The LDR insn is capable of doing branching: if
10475 MOV LR, PC precedes an LDR insn that has R15 as its destination,
10476 the pair emulates a branch and link insn, and hence we
10477 need to save CPSR and PC as well. */
10478 if (15 != reg_dest)
10480 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10481 arm_insn_r->reg_rec_count = 1;
10485 record_buf[0] = reg_dest;
10486 record_buf[1] = ARM_PS_REGNUM;
10487 arm_insn_r->reg_rec_count = 2;
10492 if (! bits (arm_insn_r->arm_insn, 4, 11))
10494 /* Store insn, register offset and register pre-indexed,
10495 register post-indexed. */
10497 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10499 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10500 regcache_raw_read_unsigned (reg_cache, reg_src1
10502 regcache_raw_read_unsigned (reg_cache, reg_src2
10504 if (15 == reg_src2)
10506 /* If R15 was used as Rn, its value is the current PC + 8. */
10507 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10508 u_regval[0] = u_regval[0] + 8;
10510 /* Calculate target store address, Rn +/- Rm, register offset. */
10512 if (bit (arm_insn_r->arm_insn, 23))
10514 tgt_mem_addr = u_regval[0] + u_regval[1];
10518 tgt_mem_addr = u_regval[1] - u_regval[0];
10521 switch (arm_insn_r->opcode)
10535 record_buf_mem[0] = 4;
10550 record_buf_mem[0] = 1;
10554 gdb_assert_not_reached ("no decoding pattern found");
10557 record_buf_mem[1] = tgt_mem_addr;
10558 arm_insn_r->mem_rec_count = 1;
10560 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10561 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10562 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10563 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10564 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10565 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10568 /* Rn is going to be changed in pre-indexed mode and
10569 post-indexed mode as well. */
10570 record_buf[0] = reg_src2;
10571 arm_insn_r->reg_rec_count = 1;
10576 /* Store insn, scaled register offset; scaled pre-indexed. */
10577 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10579 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10581 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10582 /* Get shift_imm. */
10583 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10584 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10585 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10586 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10587 /* Offset_12 used as shift. */
10591 /* Offset_12 used as index. */
10592 offset_12 = u_regval[0] << shift_imm;
10596 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10602 if (bit (u_regval[0], 31))
10604 offset_12 = 0xFFFFFFFF;
10613 /* This is an arithmetic shift. */
10614 offset_12 = s_word >> shift_imm;
10621 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10623 /* Get C flag value and shift it by 31. */
10624 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10625 | (u_regval[0]) >> 1);
10629 offset_12 = (u_regval[0] >> shift_imm) \
10631 (sizeof(uint32_t) - shift_imm));
10636 gdb_assert_not_reached ("no decoding pattern found");
10640 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10642 if (bit (arm_insn_r->arm_insn, 23))
10644 tgt_mem_addr = u_regval[1] + offset_12;
10648 tgt_mem_addr = u_regval[1] - offset_12;
10651 switch (arm_insn_r->opcode)
10665 record_buf_mem[0] = 4;
10680 record_buf_mem[0] = 1;
10684 gdb_assert_not_reached ("no decoding pattern found");
10687 record_buf_mem[1] = tgt_mem_addr;
10688 arm_insn_r->mem_rec_count = 1;
10690 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10691 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10692 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10693 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10694 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10695 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10698 /* Rn is going to be changed in register scaled pre-indexed
10699 mode, and in scaled post-indexed mode. */
10700 record_buf[0] = reg_src2;
10701 arm_insn_r->reg_rec_count = 1;
10706 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10707 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10711 /* Handle ARM mode instructions with opcode 100. */
10714 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10716 struct regcache *reg_cache = arm_insn_r->regcache;
10717 uint32_t register_count = 0, register_bits;
10718 uint32_t reg_base, addr_mode;
10719 uint32_t record_buf[24], record_buf_mem[48];
10723 /* Fetch the list of registers. */
10724 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10725 arm_insn_r->reg_rec_count = 0;
10727 /* Fetch the base register that contains the address we are loading data
10729 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10731 /* Calculate wback. */
10732 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10734 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10736 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10738 /* Find out which registers are going to be loaded from memory. */
10739 while (register_bits)
10741 if (register_bits & 0x00000001)
10742 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10743 register_bits = register_bits >> 1;
10748 /* If wback is true, also save the base register, which is going to be
10751 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10753 /* Save the CPSR register. */
10754 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10758 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10760 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10762 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10764 /* Find out how many registers are going to be stored to memory. */
10765 while (register_bits)
10767 if (register_bits & 0x00000001)
10769 register_bits = register_bits >> 1;
10774 /* STMDA (STMED): Decrement after. */
10776 record_buf_mem[1] = (uint32_t) u_regval
10777 - register_count * INT_REGISTER_SIZE + 4;
10779 /* STM (STMIA, STMEA): Increment after. */
10781 record_buf_mem[1] = (uint32_t) u_regval;
10783 /* STMDB (STMFD): Decrement before. */
10785 record_buf_mem[1] = (uint32_t) u_regval
10786 - register_count * INT_REGISTER_SIZE;
10788 /* STMIB (STMFA): Increment before. */
10790 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10793 gdb_assert_not_reached ("no decoding pattern found");
10797 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10798 arm_insn_r->mem_rec_count = 1;
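/* For example, "stmdb sp!, {r4-r7, lr}" stores five words, so the
recorded block is 20 bytes starting at SP - 20; SP itself is added
below because wback is set. */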
10800 /* If wback is true, also save the base register, which is going to be
10803 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10806 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10807 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10811 /* Handling opcode 101 insns. */
10814 arm_record_b_bl (insn_decode_record *arm_insn_r)
10816 uint32_t record_buf[8];
10818 /* Handle B, BL, BLX(1) insns. */
10819 /* B simply branches so we do nothing here. */
10820 /* Note: BLX(1) doesn't fall here; it falls into the
10821 extension space instead. */
10822 if (bit (arm_insn_r->arm_insn, 24))
10824 record_buf[0] = ARM_LR_REGNUM;
10825 arm_insn_r->reg_rec_count = 1;
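/* For example, "bl some_func" (bit 24 set, callee name purely illustrative)
overwrites LR with the return address; the PC itself is recorded
generically by arm_process_record. */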
10828 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10833 /* Report an instruction that process record does not support. */
10836 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10838 printf_unfiltered (_("Process record does not support instruction "
10839 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10840 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10845 /* Record handler for vector data transfer instructions. */
10848 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10850 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10851 uint32_t record_buf[4];
10853 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10854 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10855 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10856 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10857 bit_l = bit (arm_insn_r->arm_insn, 20);
10858 bit_c = bit (arm_insn_r->arm_insn, 8);
10860 /* Handle VMOV instruction. */
10861 if (bit_l && bit_c)
10863 record_buf[0] = reg_t;
10864 arm_insn_r->reg_rec_count = 1;
10866 else if (bit_l && !bit_c)
10868 /* Handle VMOV instruction. */
10869 if (bits_a == 0x00)
10871 if (bit (arm_insn_r->arm_insn, 20))
10872 record_buf[0] = reg_t;
10874 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10877 arm_insn_r->reg_rec_count = 1;
10879 /* Handle VMRS instruction. */
10880 else if (bits_a == 0x07)
10883 reg_t = ARM_PS_REGNUM;
10885 record_buf[0] = reg_t;
10886 arm_insn_r->reg_rec_count = 1;
10889 else if (!bit_l && !bit_c)
10891 /* Handle VMOV instruction. */
10892 if (bits_a == 0x00)
10894 if (bit (arm_insn_r->arm_insn, 20))
10895 record_buf[0] = reg_t;
10897 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10900 arm_insn_r->reg_rec_count = 1;
10902 /* Handle VMSR instruction. */
10903 else if (bits_a == 0x07)
10905 record_buf[0] = ARM_FPSCR_REGNUM;
10906 arm_insn_r->reg_rec_count = 1;
10909 else if (!bit_l && bit_c)
10911 /* Handle VMOV instruction. */
10912 if (!(bits_a & 0x04))
10914 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
10916 arm_insn_r->reg_rec_count = 1;
10918 /* Handle VDUP instruction. */
10921 if (bit (arm_insn_r->arm_insn, 21))
10923 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10924 record_buf[0] = reg_v + ARM_D0_REGNUM;
10925 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
10926 arm_insn_r->reg_rec_count = 2;
10930 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10931 record_buf[0] = reg_v + ARM_D0_REGNUM;
10932 arm_insn_r->reg_rec_count = 1;
10937 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10941 /* Record handler for extension register load/store instructions. */
10944 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
10946 uint32_t opcode, single_reg;
10947 uint8_t op_vldm_vstm;
10948 uint32_t record_buf[8], record_buf_mem[128];
10949 ULONGEST u_regval = 0;
10951 struct regcache *reg_cache = arm_insn_r->regcache;
10952 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10954 opcode = bits (arm_insn_r->arm_insn, 20, 24);
10955 single_reg = bit (arm_insn_r->arm_insn, 8);
10956 op_vldm_vstm = opcode & 0x1b;
10958 /* Handle VMOV instructions. */
10959 if ((opcode & 0x1e) == 0x04)
10961 if (bit (arm_insn_r->arm_insn, 4))
10963 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10964 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10965 arm_insn_r->reg_rec_count = 2;
10969 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
10970 | bit (arm_insn_r->arm_insn, 5);
10974 record_buf[0] = num_regs + reg_m;
10975 record_buf[1] = num_regs + reg_m + 1;
10976 arm_insn_r->reg_rec_count = 2;
10980 record_buf[0] = reg_m + ARM_D0_REGNUM;
10981 arm_insn_r->reg_rec_count = 1;
10985 /* Handle VSTM and VPUSH instructions. */
10986 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
10987 || op_vldm_vstm == 0x12)
10989 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
10990 uint32_t memory_index = 0;
10992 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
10993 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
10994 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
10995 imm_off32 = imm_off8 << 2;	/* The offset is imm8 words, i.e. imm8 * 4 bytes. */
10996 memory_count = imm_off8;
10998 if (bit (arm_insn_r->arm_insn, 23))
10999 start_address = u_regval;
11001 start_address = u_regval - imm_off32;
11003 if (bit (arm_insn_r->arm_insn, 21))
11005 record_buf[0] = reg_rn;
11006 arm_insn_r->reg_rec_count = 1;
11009 while (memory_count > 0)
11013 record_buf_mem[memory_index] = start_address;
11014 record_buf_mem[memory_index + 1] = 4;
11015 start_address = start_address + 4;
11016 memory_index = memory_index + 2;
11020 record_buf_mem[memory_index] = start_address;
11021 record_buf_mem[memory_index + 1] = 4;
11022 record_buf_mem[memory_index + 2] = start_address + 4;
11023 record_buf_mem[memory_index + 3] = 4;
11024 start_address = start_address + 8;
11025 memory_index = memory_index + 4;
11029 arm_insn_r->mem_rec_count = (memory_index >> 1);
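/* For example, the single-precision "vstmia r0, {s0-s3}" (imm8 == 4)
stores four consecutive words starting at the value of r0, and four
4-byte blocks are recorded accordingly. */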
11031 /* Handle VLDM instructions. */
11032 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11033 || op_vldm_vstm == 0x13)
11035 uint32_t reg_count, reg_vd;
11036 uint32_t reg_index = 0;
11038 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11039 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11042 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11044 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11046 if (bit (arm_insn_r->arm_insn, 21))
11047 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11049 while (reg_count > 0)
11052 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
11054 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11058 arm_insn_r->reg_rec_count = reg_index;
11060 /* VSTR Vector store register. */
11061 else if ((opcode & 0x13) == 0x10)
11063 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11064 uint32_t memory_index = 0;
11066 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11067 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11068 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11069 imm_off32 = imm_off8 << 2;	/* The offset is imm8 words, i.e. imm8 * 4 bytes. */
11070 memory_count = imm_off8;
11072 if (bit (arm_insn_r->arm_insn, 23))
11073 start_address = u_regval + imm_off32;
11075 start_address = u_regval - imm_off32;
11079 record_buf_mem[memory_index] = start_address;
11080 record_buf_mem[memory_index + 1] = 4;
11081 arm_insn_r->mem_rec_count = 1;
11085 record_buf_mem[memory_index] = start_address;
11086 record_buf_mem[memory_index + 1] = 4;
11087 record_buf_mem[memory_index + 2] = start_address + 4;
11088 record_buf_mem[memory_index + 3] = 4;
11089 arm_insn_r->mem_rec_count = 2;
11092 /* VLDR Vector load register. */
11093 else if ((opcode & 0x13) == 0x11)
11095 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11099 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11100 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11104 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11105 record_buf[0] = num_regs + reg_vd;
11107 arm_insn_r->reg_rec_count = 1;
11110 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11111 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11115 /* Record handler for arm/thumb mode VFP data processing instructions. */
11118 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11120 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11121 uint32_t record_buf[4];
11122 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11123 enum insn_types curr_insn_type = INSN_INV;
11125 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11126 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11127 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11128 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11129 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11130 bit_d = bit (arm_insn_r->arm_insn, 22);
11131 opc1 = opc1 & ~0x04;	/* Bit 22 (D) takes no part in the opc1 decode. */
11133 /* Handle VMLA, VMLS. */
11136 if (bit (arm_insn_r->arm_insn, 10))
11138 if (bit (arm_insn_r->arm_insn, 6))
11139 curr_insn_type = INSN_T0;
11141 curr_insn_type = INSN_T1;
11146 curr_insn_type = INSN_T1;
11148 curr_insn_type = INSN_T2;
11151 /* Handle VNMLA, VNMLS, VNMUL. */
11152 else if (opc1 == 0x01)
11155 curr_insn_type = INSN_T1;
11157 curr_insn_type = INSN_T2;
11160 else if (opc1 == 0x02 && !(opc3 & 0x01))
11162 if (bit (arm_insn_r->arm_insn, 10))
11164 if (bit (arm_insn_r->arm_insn, 6))
11165 curr_insn_type = INSN_T0;
11167 curr_insn_type = INSN_T1;
11172 curr_insn_type = INSN_T1;
11174 curr_insn_type = INSN_T2;
11177 /* Handle VADD, VSUB. */
11178 else if (opc1 == 0x03)
11180 if (!bit (arm_insn_r->arm_insn, 9))
11182 if (bit (arm_insn_r->arm_insn, 6))
11183 curr_insn_type = INSN_T0;
11185 curr_insn_type = INSN_T1;
11190 curr_insn_type = INSN_T1;
11192 curr_insn_type = INSN_T2;
11196 else if (opc1 == 0x08)
11199 curr_insn_type = INSN_T1;
11201 curr_insn_type = INSN_T2;
11203 /* Handle all other vfp data processing instructions. */
11204 else if (opc1 == 0x0b)
11207 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11209 if (bit (arm_insn_r->arm_insn, 4))
11211 if (bit (arm_insn_r->arm_insn, 6))
11212 curr_insn_type = INSN_T0;
11214 curr_insn_type = INSN_T1;
11219 curr_insn_type = INSN_T1;
11221 curr_insn_type = INSN_T2;
11224 /* Handle VNEG and VABS. */
11225 else if ((opc2 == 0x01 && opc3 == 0x01)
11226 || (opc2 == 0x00 && opc3 == 0x03))
11228 if (!bit (arm_insn_r->arm_insn, 11))
11230 if (bit (arm_insn_r->arm_insn, 6))
11231 curr_insn_type = INSN_T0;
11233 curr_insn_type = INSN_T1;
11238 curr_insn_type = INSN_T1;
11240 curr_insn_type = INSN_T2;
11243 /* Handle VSQRT. */
11244 else if (opc2 == 0x01 && opc3 == 0x03)
11247 curr_insn_type = INSN_T1;
11249 curr_insn_type = INSN_T2;
11252 else if (opc2 == 0x07 && opc3 == 0x03)
11255 curr_insn_type = INSN_T1;
11257 curr_insn_type = INSN_T2;
11259 else if (opc3 & 0x01)
11262 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11264 if (!bit (arm_insn_r->arm_insn, 18))
11265 curr_insn_type = INSN_T2;
11269 curr_insn_type = INSN_T1;
11271 curr_insn_type = INSN_T2;
11275 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11278 curr_insn_type = INSN_T1;
11280 curr_insn_type = INSN_T2;
11282 /* Handle VCVTB, VCVTT. */
11283 else if ((opc2 & 0x0e) == 0x02)
11284 curr_insn_type = INSN_T2;
11285 /* Handle VCMP, VCMPE. */
11286 else if ((opc2 & 0x0e) == 0x04)
11287 curr_insn_type = INSN_T3;
11291 switch (curr_insn_type)
11294 reg_vd = reg_vd | (bit_d << 4);
11295 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11296 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11297 arm_insn_r->reg_rec_count = 2;
11301 reg_vd = reg_vd | (bit_d << 4);
11302 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11303 arm_insn_r->reg_rec_count = 1;
11307 reg_vd = (reg_vd << 1) | bit_d;
11308 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11309 arm_insn_r->reg_rec_count = 1;
11313 record_buf[0] = ARM_FPSCR_REGNUM;
11314 arm_insn_r->reg_rec_count = 1;
11318 gdb_assert_not_reached ("no decoding pattern found");
11322 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11326 /* Handling opcode 110 insns. */
11329 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11331 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
11333 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11334 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11335 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11337 if ((coproc & 0x0e) == 0x0a)
11339 /* Handle extension register ld/st instructions. */
11341 return arm_record_exreg_ld_st_insn (arm_insn_r);
11343 /* 64-bit transfers between arm core and extension registers. */
11344 if ((op1 & 0x3e) == 0x04)
11345 return arm_record_exreg_ld_st_insn (arm_insn_r);
11349 /* Handle coprocessor ld/st instructions. */
11354 return arm_record_unsupported_insn (arm_insn_r);
11357 return arm_record_unsupported_insn (arm_insn_r);
11360 /* Move to coprocessor from two arm core registers. */
11362 return arm_record_unsupported_insn (arm_insn_r);
11364 /* Move to two arm core registers from coprocessor. */
11369 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11370 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11371 arm_insn_r->reg_rec_count = 2;
11373 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11377 return arm_record_unsupported_insn (arm_insn_r);
11380 /* Handling opcode 111 insns. */
11383 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11385 uint32_t op, op1_sbit, op1_ebit, coproc;
11386 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11387 struct regcache *reg_cache = arm_insn_r->regcache;
11388 ULONGEST u_regval = 0;
11390 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11391 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11392 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11393 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11394 op = bit (arm_insn_r->arm_insn, 4);
11396 /* Handle arm SWI/SVC system call instructions. */
11399 if (tdep->arm_syscall_record != NULL)
11401 ULONGEST svc_operand, svc_number;
11403 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11405 if (svc_operand) /* OABI. */
11406 svc_number = svc_operand - 0x900000;
11408 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11410 return tdep->arm_syscall_record (reg_cache, svc_number);
11414 printf_unfiltered (_("no syscall record support\n"));
11419 if ((coproc & 0x0e) == 0x0a)
11421 /* VFP data-processing instructions. */
11422 if (!op1_sbit && !op)
11423 return arm_record_vfp_data_proc_insn (arm_insn_r);
11425 /* Advanced SIMD, VFP instructions. */
11426 if (!op1_sbit && op)
11427 return arm_record_vdata_transfer_insn (arm_insn_r);
11431 /* Coprocessor data operations. */
11432 if (!op1_sbit && !op)
11433 return arm_record_unsupported_insn (arm_insn_r);
11435 /* Move to Coprocessor from ARM core register. */
11436 if (!op1_sbit && !op1_ebit && op)
11437 return arm_record_unsupported_insn (arm_insn_r);
11439 /* Move to arm core register from coprocessor. */
11440 if (!op1_sbit && op1_ebit && op)
11442 uint32_t record_buf[1];
11444 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11445 if (record_buf[0] == 15)
11446 record_buf[0] = ARM_PS_REGNUM;
11448 arm_insn_r->reg_rec_count = 1;
11449 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11455 return arm_record_unsupported_insn (arm_insn_r);
11458 /* Handling opcode 000 insns. */
11461 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11463 uint32_t record_buf[8];
11464 uint32_t reg_src1 = 0;
11466 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11468 record_buf[0] = ARM_PS_REGNUM;
11469 record_buf[1] = reg_src1;
11470 thumb_insn_r->reg_rec_count = 2;
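/* For example, "lsls r2, r5, #3" writes r2 (bits 0-2) and the flags,
matching the two entries recorded here. */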
11472 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11478 /* Handling opcode 001 insns. */
11481 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11483 uint32_t record_buf[8];
11484 uint32_t reg_src1 = 0;
11486 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11488 record_buf[0] = ARM_PS_REGNUM;
11489 record_buf[1] = reg_src1;
11490 thumb_insn_r->reg_rec_count = 2;
11492 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11497 /* Handling opcode 010 insns. */
11500 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11502 struct regcache *reg_cache = thumb_insn_r->regcache;
11503 uint32_t record_buf[8], record_buf_mem[8];
11505 uint32_t reg_src1 = 0, reg_src2 = 0;
11506 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11508 ULONGEST u_regval[2] = {0};
11510 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11512 if (bit (thumb_insn_r->arm_insn, 12))
11514 /* Handle load/store register offset. */
11515 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11516 if (opcode2 >= 12 && opcode2 <= 15)
11518 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11519 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11520 record_buf[0] = reg_src1;
11521 thumb_insn_r->reg_rec_count = 1;
11523 else if (opcode2 >= 8 && opcode2 <= 10)
11525 /* STR(2), STRB(2), STRH(2). */
11526 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11527 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11528 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11529 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11531 record_buf_mem[0] = 4; /* STR (2). */
11532 else if (10 == opcode2)
11533 record_buf_mem[0] = 1; /* STRB (2). */
11534 else if (9 == opcode2)
11535 record_buf_mem[0] = 2; /* STRH (2). */
11536 record_buf_mem[1] = u_regval[0] + u_regval[1];
11537 thumb_insn_r->mem_rec_count = 1;
11540 else if (bit (thumb_insn_r->arm_insn, 11))
11542 /* Handle load from literal pool. */
11544 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11545 record_buf[0] = reg_src1;
11546 thumb_insn_r->reg_rec_count = 1;
11550 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11551 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11552 if ((3 == opcode2) && (!opcode3))
11554 /* Branch with exchange. */
11555 record_buf[0] = ARM_PS_REGNUM;
11556 thumb_insn_r->reg_rec_count = 1;
11560 /* Format 8; special data processing insns. */
11561 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11562 record_buf[0] = ARM_PS_REGNUM;
11563 record_buf[1] = reg_src1;
11564 thumb_insn_r->reg_rec_count = 2;
11569 /* Format 5; data processing insns. */
11570 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11571 if (bit (thumb_insn_r->arm_insn, 7))
11573 reg_src1 = reg_src1 + 8;
11575 record_buf[0] = ARM_PS_REGNUM;
11576 record_buf[1] = reg_src1;
11577 thumb_insn_r->reg_rec_count = 2;
11580 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11581 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11587 /* Handling opcode 011 insns. */
11590 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11592 struct regcache *reg_cache = thumb_insn_r->regcache;
11593 uint32_t record_buf[8], record_buf_mem[8];
11595 uint32_t reg_src1 = 0;
11596 uint32_t opcode = 0, immed_5 = 0;
11598 ULONGEST u_regval = 0;
11600 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11605 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11606 record_buf[0] = reg_src1;
11607 thumb_insn_r->reg_rec_count = 1;
11612 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11613 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11614 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11615 record_buf_mem[0] = 4;
11616 record_buf_mem[1] = u_regval + (immed_5 * 4);
11617 thumb_insn_r->mem_rec_count = 1;
11620 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11621 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11627 /* Handling opcode 100 insns. */
11630 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11632 struct regcache *reg_cache = thumb_insn_r->regcache;
11633 uint32_t record_buf[8], record_buf_mem[8];
11635 uint32_t reg_src1 = 0;
11636 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11638 ULONGEST u_regval = 0;
11640 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11645 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11646 record_buf[0] = reg_src1;
11647 thumb_insn_r->reg_rec_count = 1;
11649 else if (1 == opcode)
11652 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11653 record_buf[0] = reg_src1;
11654 thumb_insn_r->reg_rec_count = 1;
11656 else if (2 == opcode)
11659 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11660 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11661 record_buf_mem[0] = 4;
11662 record_buf_mem[1] = u_regval + (immed_8 * 4);
11663 thumb_insn_r->mem_rec_count = 1;
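/* For example, "str r1, [sp, #12]" (immed_8 == 3) records a 4-byte block
at SP + 12. */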
11665 else if (0 == opcode)
11668 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11669 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11670 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11671 record_buf_mem[0] = 2;
11672 record_buf_mem[1] = u_regval + (immed_5 * 2);
11673 thumb_insn_r->mem_rec_count = 1;
11676 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11677 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11683 /* Handling opcode 101 insns. */
11686 thumb_record_misc (insn_decode_record *thumb_insn_r)
11688 struct regcache *reg_cache = thumb_insn_r->regcache;
11690 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11691 uint32_t register_bits = 0, register_count = 0;
11692 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
11693 uint32_t record_buf[24], record_buf_mem[48];
11696 ULONGEST u_regval = 0;
11698 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11699 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11700 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11705 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11706 while (register_bits)
11708 if (register_bits & 0x00000001)
11709 record_buf[index++] = register_count;
11710 register_bits = register_bits >> 1;
11713 record_buf[index++] = ARM_PS_REGNUM;
11714 record_buf[index++] = ARM_SP_REGNUM;
11715 thumb_insn_r->reg_rec_count = index;
11717 else if (10 == opcode2)
11720 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11721 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11722 while (register_bits)
11724 if (register_bits & 0x00000001)
11726 register_bits = register_bits >> 1;
11728 start_address = u_regval - \
11729 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11730 thumb_insn_r->mem_rec_count = register_count;
11731 while (register_count)
11733 record_buf_mem[(register_count * 2) - 1] = start_address;
11734 record_buf_mem[(register_count * 2) - 2] = 4;
11735 start_address = start_address + 4;
11738 record_buf[0] = ARM_SP_REGNUM;
11739 thumb_insn_r->reg_rec_count = 1;
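/* For example, "push {r4, r5}" stores two words at SP - 8 and SP - 4 and
writes SP back, which is what is recorded here. */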
11741 else if (0x1E == opcode1)
11744 /* Handle enhanced software breakpoint insn, BKPT. */
11745 /* On BKPT the CPSR is changed so that execution resumes in ARM state,
11746 in abort mode, with normal interrupts disabled. */
11747 /* The PC is then set according to the high vector configuration. */
11748 /* If the user hits the breakpoint and then reverse-executes, we need to
11749 go back to the previous CPSR and program counter. */
11750 record_buf[0] = ARM_PS_REGNUM;
11751 record_buf[1] = ARM_LR_REGNUM;
11752 thumb_insn_r->reg_rec_count = 2;
11753 /* We need to save SPSR value, which is not yet done. */
11754 printf_unfiltered (_("Process record does not support instruction "
11755 "0x%0x at address %s.\n"),
11756 thumb_insn_r->arm_insn,
11757 paddress (thumb_insn_r->gdbarch,
11758 thumb_insn_r->this_addr));
11761 else if ((0 == opcode) || (1 == opcode))
11763 /* ADD(5), ADD(6). */
11764 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11765 record_buf[0] = reg_src1;
11766 thumb_insn_r->reg_rec_count = 1;
11768 else if (2 == opcode)
11770 /* ADD(7), SUB(4). */
11771 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11772 record_buf[0] = ARM_SP_REGNUM;
11773 thumb_insn_r->reg_rec_count = 1;
11776 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11777 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11783 /* Handling opcode 110 insns. */
11786 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11788 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11789 struct regcache *reg_cache = thumb_insn_r->regcache;
11791 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
11792 uint32_t reg_src1 = 0;
11793 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11794 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
11795 uint32_t record_buf[24], record_buf_mem[48];
11797 ULONGEST u_regval = 0;
11799 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11800 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11806 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11808 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11809 while (register_bits)
11811 if (register_bits & 0x00000001)
11812 record_buf[index++] = register_count;
11813 register_bits = register_bits >> 1;
11816 record_buf[index++] = reg_src1;
11817 thumb_insn_r->reg_rec_count = index;
11819 else if (0 == opcode2)
11821 /* Handle STMIA (store multiple, increment after). */
11822 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11824 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11825 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11826 while (register_bits)
11828 if (register_bits & 0x00000001)
11830 register_bits = register_bits >> 1;
11832 start_address = u_regval;
11833 thumb_insn_r->mem_rec_count = register_count;
11834 while (register_count)
11836 record_buf_mem[(register_count * 2) - 1] = start_address;
11837 record_buf_mem[(register_count * 2) - 2] = 4;
11838 start_address = start_address + 4;
11842 else if (0x1F == opcode1)
11844 /* Handle arm syscall insn. */
11845 if (tdep->arm_syscall_record != NULL)
11847 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11848 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11852 printf_unfiltered (_("no syscall record support\n"));
11857 /* B (1), the conditional branch, is automatically taken care of in
11858 process_record, as the PC is saved there. */
11860 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11861 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11867 /* Handling opcode 111 insns. */
11870 thumb_record_branch (insn_decode_record *thumb_insn_r)
11872 uint32_t record_buf[8];
11873 uint32_t bits_h = 0;
11875 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
11877 if (2 == bits_h || 3 == bits_h)
11880 record_buf[0] = ARM_LR_REGNUM;
11881 thumb_insn_r->reg_rec_count = 1;
11883 else if (1 == bits_h)
11886 record_buf[0] = ARM_PS_REGNUM;
11887 record_buf[1] = ARM_LR_REGNUM;
11888 thumb_insn_r->reg_rec_count = 2;
11891 /* B(2) is automatically taken care of in process_record, as the PC is
11894 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11899 /* Handler for thumb2 load/store multiple instructions. */
11902 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
11904 struct regcache *reg_cache = thumb2_insn_r->regcache;
11906 uint32_t reg_rn, op;
11907 uint32_t register_bits = 0, register_count = 0;
11908 uint32_t index = 0, start_address = 0;
11909 uint32_t record_buf[24], record_buf_mem[48];
11911 ULONGEST u_regval = 0;
11913 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11914 op = bits (thumb2_insn_r->arm_insn, 23, 24);
11916 if (0 == op || 3 == op)
11918 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11920 /* Handle RFE instruction. */
11921 record_buf[0] = ARM_PS_REGNUM;
11922 thumb2_insn_r->reg_rec_count = 1;
11926 /* Handle SRS instruction after reading banked SP. */
11927 return arm_record_unsupported_insn (thumb2_insn_r);
11930 else if (1 == op || 2 == op)
11932 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11934 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
11935 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11936 while (register_bits)
11938 if (register_bits & 0x00000001)
11939 record_buf[index++] = register_count;
11942 register_bits = register_bits >> 1;
11944 record_buf[index++] = reg_rn;
11945 record_buf[index++] = ARM_PS_REGNUM;
11946 thumb2_insn_r->reg_rec_count = index;
11950 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
11951 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11952 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11953 while (register_bits)
11955 if (register_bits & 0x00000001)
11958 register_bits = register_bits >> 1;
11963 /* Start address calculation for STM/STMIA/STMEA (increment after). */
11964 start_address = u_regval;
11968 /* Start address calculation for STMDB/STMFD (decrement before). */
11969 start_address = u_regval - register_count * 4;
11972 thumb2_insn_r->mem_rec_count = register_count;
11973 while (register_count)
11975 record_buf_mem[register_count * 2 - 1] = start_address;
11976 record_buf_mem[register_count * 2 - 2] = 4;
11977 start_address = start_address + 4;
11980 record_buf[0] = reg_rn;
11981 record_buf[1] = ARM_PS_REGNUM;
11982 thumb2_insn_r->reg_rec_count = 2;
11986 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
11988 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
11990 return ARM_RECORD_SUCCESS;
11993 /* Handler for thumb2 load/store (dual/exclusive) and table branch
11997 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
11999 struct regcache *reg_cache = thumb2_insn_r->regcache;
12001 uint32_t reg_rd, reg_rn, offset_imm;
12002 uint32_t reg_dest1, reg_dest2;
12003 uint32_t address, offset_addr;
12004 uint32_t record_buf[8], record_buf_mem[8];
12005 uint32_t op1, op2, op3;
12008 ULONGEST u_regval[2];
12010 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12011 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12012 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12014 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12016 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12018 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12019 record_buf[0] = reg_dest1;
12020 record_buf[1] = ARM_PS_REGNUM;
12021 thumb2_insn_r->reg_rec_count = 2;
12024 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12026 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12027 record_buf[2] = reg_dest2;
12028 thumb2_insn_r->reg_rec_count = 3;
12033 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12034 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12036 if (0 == op1 && 0 == op2)
12038 /* Handle STREX. */
12039 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12040 address = u_regval[0] + (offset_imm * 4);
12041 record_buf_mem[0] = 4;
12042 record_buf_mem[1] = address;
12043 thumb2_insn_r->mem_rec_count = 1;
12044 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12045 record_buf[0] = reg_rd;
12046 thumb2_insn_r->reg_rec_count = 1;
12048 else if (1 == op1 && 0 == op2)
12050 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12051 record_buf[0] = reg_rd;
12052 thumb2_insn_r->reg_rec_count = 1;
12053 address = u_regval[0];
12054 record_buf_mem[1] = address;
12058 /* Handle STREXB. */
12059 record_buf_mem[0] = 1;
12060 thumb2_insn_r->mem_rec_count = 1;
12064 /* Handle STREXH. */
12065 record_buf_mem[0] = 2;
12066 thumb2_insn_r->mem_rec_count = 1;
12070 /* Handle STREXD. */
12071 address = u_regval[0];
12072 record_buf_mem[0] = 4;
12073 record_buf_mem[2] = 4;
12074 record_buf_mem[3] = address + 4;
12075 thumb2_insn_r->mem_rec_count = 2;
12080 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12082 if (bit (thumb2_insn_r->arm_insn, 24))
12084 if (bit (thumb2_insn_r->arm_insn, 23))
12085 offset_addr = u_regval[0] + (offset_imm * 4);
12087 offset_addr = u_regval[0] - (offset_imm * 4);
12089 address = offset_addr;
12092 address = u_regval[0];
12094 record_buf_mem[0] = 4;
12095 record_buf_mem[1] = address;
12096 record_buf_mem[2] = 4;
12097 record_buf_mem[3] = address + 4;
12098 thumb2_insn_r->mem_rec_count = 2;
12099 record_buf[0] = reg_rn;
12100 thumb2_insn_r->reg_rec_count = 1;
12104 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12106 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12108 return ARM_RECORD_SUCCESS;
12111 /* Handler for thumb2 data processing (shifted register and modified immediate)
12115 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12117 uint32_t reg_rd, op;
12118 uint32_t record_buf[8];
12120 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12121 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12123 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12125 record_buf[0] = ARM_PS_REGNUM;
12126 thumb2_insn_r->reg_rec_count = 1;
12130 record_buf[0] = reg_rd;
12131 record_buf[1] = ARM_PS_REGNUM;
12132 thumb2_insn_r->reg_rec_count = 2;
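/* For example, "eors.w r4, r4, r9" writes r4 (bits 8-11) and the flags. */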
12135 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12137 return ARM_RECORD_SUCCESS;
12140 /* Generic handler for thumb2 instructions which affect destination and PS
12144 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12147 uint32_t record_buf[8];
12149 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12151 record_buf[0] = reg_rd;
12152 record_buf[1] = ARM_PS_REGNUM;
12153 thumb2_insn_r->reg_rec_count = 2;
12155 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12157 return ARM_RECORD_SUCCESS;
12160 /* Handler for thumb2 branch and miscellaneous control instructions. */
12163 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12165 uint32_t op, op1, op2;
12166 uint32_t record_buf[8];
12168 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12169 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12170 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12172 /* Handle MSR insn. */
12173 if (!(op1 & 0x2) && 0x38 == op)
12177 /* CPSR is going to be changed. */
12178 record_buf[0] = ARM_PS_REGNUM;
12179 thumb2_insn_r->reg_rec_count = 1;
12183 arm_record_unsupported_insn (thumb2_insn_r);
12187 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12190 record_buf[0] = ARM_PS_REGNUM;
12191 record_buf[1] = ARM_LR_REGNUM;
12192 thumb2_insn_r->reg_rec_count = 2;
12195 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12197 return ARM_RECORD_SUCCESS;
12200 /* Handler for thumb2 store single data item instructions. */
12203 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12205 struct regcache *reg_cache = thumb2_insn_r->regcache;
12207 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12208 uint32_t address, offset_addr;
12209 uint32_t record_buf[8], record_buf_mem[8];
12212 ULONGEST u_regval[2];
12214 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12215 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12216 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12217 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12219 if (bit (thumb2_insn_r->arm_insn, 23))
12222 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12223 offset_addr = u_regval[0] + offset_imm;
12224 address = offset_addr;
12229 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12231 /* Handle STRB (register). */
12232 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12233 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12234 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12235 offset_addr = u_regval[1] << shift_imm;
12236 address = u_regval[0] + offset_addr;
12240 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12241 if (bit (thumb2_insn_r->arm_insn, 10))
12243 if (bit (thumb2_insn_r->arm_insn, 9))
12244 offset_addr = u_regval[0] + offset_imm;
12246 offset_addr = u_regval[0] - offset_imm;
12248 address = offset_addr;
12251 address = u_regval[0];
12257 /* Store byte instructions. */
12260 record_buf_mem[0] = 1;
12262 /* Store half word instructions. */
12265 record_buf_mem[0] = 2;
12267 /* Store word instructions. */
12270 record_buf_mem[0] = 4;
12274 gdb_assert_not_reached ("no decoding pattern found");
12278 record_buf_mem[1] = address;
12279 thumb2_insn_r->mem_rec_count = 1;
12280 record_buf[0] = reg_rn;
12281 thumb2_insn_r->reg_rec_count = 1;
12283 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12285 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12287 return ARM_RECORD_SUCCESS;
12290 /* Handler for thumb2 load memory hints instructions. */
12293 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12295 uint32_t record_buf[8];
12296 uint32_t reg_rt, reg_rn;
12298 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12299 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12301 if (ARM_PC_REGNUM != reg_rt)
12303 record_buf[0] = reg_rt;
12304 record_buf[1] = reg_rn;
12305 record_buf[2] = ARM_PS_REGNUM;
12306 thumb2_insn_r->reg_rec_count = 3;
12308 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12310 return ARM_RECORD_SUCCESS;
12313 return ARM_RECORD_FAILURE;
12316 /* Handler for thumb2 load word instructions. */
12319 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12321 uint32_t opcode1 = 0, opcode2 = 0;
12322 uint32_t record_buf[8];
12324 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12325 record_buf[1] = ARM_PS_REGNUM;
12326 thumb2_insn_r->reg_rec_count = 2;
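/* For example, "ldr.w r3, [r0, #16]" loads r3 (bits 12-15); CPSR is
recorded unconditionally by this handler. */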
12328 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12330 return ARM_RECORD_SUCCESS;
12333 /* Handler for thumb2 long multiply, long multiply accumulate, and
12334 divide instructions. */
12337 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12339 uint32_t opcode1 = 0, opcode2 = 0;
12340 uint32_t record_buf[8];
12341 uint32_t reg_src1 = 0;
12343 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12344 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12346 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12348 /* Handle SMULL, UMULL, SMLAL and UMLAL, together with their
12349 halfword and dual variants. */
12350 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12351 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12352 record_buf[2] = ARM_PS_REGNUM;
12353 thumb2_insn_r->reg_rec_count = 3;
12355 else if (1 == opcode1 || 3 == opcode1)
12357 /* Handle SDIV and UDIV. */
12358 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12359 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12360 record_buf[2] = ARM_PS_REGNUM;
12361 thumb2_insn_r->reg_rec_count = 3;
12364 return ARM_RECORD_FAILURE;
12366 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12368 return ARM_RECORD_SUCCESS;
12371 /* Record handler for thumb32 coprocessor instructions. */
12374 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12376 if (bit (thumb2_insn_r->arm_insn, 25))
12377 return arm_record_coproc_data_proc (thumb2_insn_r);
12379 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12382 /* Record handler for Advanced SIMD structure load/store instructions. */
12385 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12387 struct regcache *reg_cache = thumb2_insn_r->regcache;
12388 uint32_t l_bit, a_bit, b_bits;
12389 uint32_t record_buf[128], record_buf_mem[128];
12390 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
12391 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12394 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12395 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12396 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12397 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12398 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12399 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12400 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12401 f_esize = 8 * f_ebytes;
12402 f_elem = 8 / f_ebytes;
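/* For example, a 32-bit element size (bits 6-7 == 2) gives f_ebytes == 4
and f_elem == 2 elements per doubleword register. */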
12406 ULONGEST u_regval = 0;
12407 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12408 address = u_regval;
12413 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12415 if (b_bits == 0x07)
12417 else if (b_bits == 0x0a)
12419 else if (b_bits == 0x06)
12421 else if (b_bits == 0x02)
12426 for (index_r = 0; index_r < bf_regs; index_r++)
12428 for (index_e = 0; index_e < f_elem; index_e++)
12430 record_buf_mem[index_m++] = f_ebytes;
12431 record_buf_mem[index_m++] = address;
12432 address = address + f_ebytes;
12433 thumb2_insn_r->mem_rec_count += 1;
12438 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12440 if (b_bits == 0x09 || b_bits == 0x08)
12442 else if (b_bits == 0x03)
12447 for (index_r = 0; index_r < bf_regs; index_r++)
12448 for (index_e = 0; index_e < f_elem; index_e++)
12450 for (loop_t = 0; loop_t < 2; loop_t++)
12452 record_buf_mem[index_m++] = f_ebytes;
12453 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12454 thumb2_insn_r->mem_rec_count += 1;
12456 address = address + (2 * f_ebytes);
12460 else if ((b_bits & 0x0e) == 0x04)
12462 for (index_e = 0; index_e < f_elem; index_e++)
12464 for (loop_t = 0; loop_t < 3; loop_t++)
12466 record_buf_mem[index_m++] = f_ebytes;
12467 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12468 thumb2_insn_r->mem_rec_count += 1;
12470 address = address + (3 * f_ebytes);
12474 else if (!(b_bits & 0x0e))
12476 for (index_e = 0; index_e < f_elem; index_e++)
12478 for (loop_t = 0; loop_t < 4; loop_t++)
12480 record_buf_mem[index_m++] = f_ebytes;
12481 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12482 thumb2_insn_r->mem_rec_count += 1;
12484 address = address + (4 * f_ebytes);
12490 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12492 if (bft_size == 0x00)
12494 else if (bft_size == 0x01)
12496 else if (bft_size == 0x02)
12502 if (!(b_bits & 0x0b) || b_bits == 0x08)
12503 thumb2_insn_r->mem_rec_count = 1;
12505 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12506 thumb2_insn_r->mem_rec_count = 2;
12508 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12509 thumb2_insn_r->mem_rec_count = 3;
12511 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12512 thumb2_insn_r->mem_rec_count = 4;
12514 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12516 record_buf_mem[index_m] = f_ebytes;
12517 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12526 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12527 thumb2_insn_r->reg_rec_count = 1;
12529 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12530 thumb2_insn_r->reg_rec_count = 2;
12532 else if ((b_bits & 0x0e) == 0x04)
12533 thumb2_insn_r->reg_rec_count = 3;
12535 else if (!(b_bits & 0x0e))
12536 thumb2_insn_r->reg_rec_count = 4;
12541 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12542 thumb2_insn_r->reg_rec_count = 1;
12544 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12545 thumb2_insn_r->reg_rec_count = 2;
12547 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12548 thumb2_insn_r->reg_rec_count = 3;
12550 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12551 thumb2_insn_r->reg_rec_count = 4;
12553 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12554 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12558 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12560 record_buf[index_r] = reg_rn;
12561 thumb2_insn_r->reg_rec_count += 1;
12564 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12566 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12571 /* Decodes thumb2 instruction type and invokes its record handler. */
12573 static unsigned int
12574 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12576 uint32_t op, op1, op2;
12578 op = bit (thumb2_insn_r->arm_insn, 15);
12579 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12580 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12584 if (!(op2 & 0x64))
12586 /* Load/store multiple instruction. */
12587 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12589 else if (!((op2 & 0x64) ^ 0x04))
12591 /* Load/store (dual/exclusive) and table branch instruction. */
12592 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12594 else if (!((op2 & 0x20) ^ 0x20))
12596 /* Data-processing (shifted register). */
12597 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12599 else if (op2 & 0x40)
12601 /* Co-processor instructions. */
12602 return thumb2_record_coproc_insn (thumb2_insn_r);
12605 else if (op1 == 0x02)
12609 /* Branches and miscellaneous control instructions. */
12610 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12612 else if (op2 & 0x20)
12614 /* Data-processing (plain binary immediate) instruction. */
12615 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12619 /* Data-processing (modified immediate). */
12620 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12623 else if (op1 == 0x03)
12625 if (!(op2 & 0x71))
12627 /* Store single data item. */
12628 return thumb2_record_str_single_data (thumb2_insn_r);
12630 else if (!((op2 & 0x71) ^ 0x10))
12632 /* Advanced SIMD or structure load/store instructions. */
12633 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12635 else if (!((op2 & 0x67) ^ 0x01))
12637 /* Load byte, memory hints instruction. */
12638 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12640 else if (!((op2 & 0x67) ^ 0x03))
12642 /* Load halfword, memory hints instruction. */
12643 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12645 else if (!((op2 & 0x67) ^ 0x05))
12647 /* Load word instruction. */
12648 return thumb2_record_ld_word (thumb2_insn_r);
12650 else if (!((op2 & 0x70) ^ 0x20))
12652 /* Data-processing (register) instruction. */
12653 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12655 else if (!((op2 & 0x78) ^ 0x30))
12657 /* Multiply, multiply accumulate, abs diff instruction. */
12658 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12660 else if (!((op2 & 0x78) ^ 0x38))
12662 /* Long multiply, long multiply accumulate, and divide. */
12663 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12665 else if (op2 & 0x40)
12667 /* Co-processor instructions. */
12668 return thumb2_record_coproc_insn (thumb2_insn_r);
12675 /* Extract an arm/thumb/thumb2 insn depending on the size, and return 0 on
12676 success and a positive value on failure. */
12679 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12681 gdb_byte buf[insn_size];
12683 memset (&buf[0], 0, insn_size);
12685 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12687 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12689 gdbarch_byte_order_for_code (insn_record->gdbarch));
12693 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12695 /* Decode arm/thumb insn depending on condition codes and opcodes; and
12699 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12700 uint32_t insn_size)
12703 /* (Bit positions counted from 0.)  Bits 25, 26 and 27 decode the type of the arm instruction. */
12704 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12706 arm_record_data_proc_misc_ld_str, /* 000. */
12707 arm_record_data_proc_imm, /* 001. */
12708 arm_record_ld_st_imm_offset, /* 010. */
12709 arm_record_ld_st_reg_offset, /* 011. */
12710 arm_record_ld_st_multiple, /* 100. */
12711 arm_record_b_bl, /* 101. */
12712 arm_record_asimd_vfp_coproc, /* 110. */
12713 arm_record_coproc_data_proc /* 111. */
12716 /* (Bit positions counted from 0.)  Bits 13, 14 and 15 decode the type of the thumb instruction. */
12717 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12719 thumb_record_shift_add_sub, /* 000. */
12720 thumb_record_add_sub_cmp_mov, /* 001. */
12721 thumb_record_ld_st_reg_offset, /* 010. */
12722 thumb_record_ld_st_imm_offset, /* 011. */
12723 thumb_record_ld_st_stack, /* 100. */
12724 thumb_record_misc, /* 101. */
12725 thumb_record_ldm_stm_swi, /* 110. */
12726 thumb_record_branch /* 111. */
12729 uint32_t ret = 0; /* Return value: negative on failure, 0 on success. */
12730 uint32_t insn_id = 0;
12732 if (extract_arm_insn (arm_record, insn_size))
12736 printf_unfiltered (_("Process record: error reading memory at "
12737 "addr %s len = %d.\n"),
12738 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
12742 else if (ARM_RECORD == record_type)
12744 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12745 insn_id = bits (arm_record->arm_insn, 25, 27);
12746 ret = arm_record_extension_space (arm_record);
12747 /* If this insn has fallen into extension space
12748 then we need not decode it anymore. */
12749 if (ret != -1 && !INSN_RECORDED(arm_record))
12751 ret = arm_handle_insn[insn_id] (arm_record);
12754 else if (THUMB_RECORD == record_type)
12756 /* As thumb does not have condition codes, we set cond to -1. */
12757 arm_record->cond = -1;
12758 insn_id = bits (arm_record->arm_insn, 13, 15);
12759 ret = thumb_handle_insn[insn_id] (arm_record);
12761 else if (THUMB2_RECORD == record_type)
12763 /* As thumb does not have condition codes, we set cond to -1. */
12764 arm_record->cond = -1;
12766 /* Swap the first half of the 32-bit thumb instruction with the second half. */
12767 arm_record->arm_insn
12768 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
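/* For example, on a little-endian target "ldr.w r3, [r0, #16]" (halfwords
0xf8d0 and 0x3010) is extracted as 0x3010f8d0; the swap yields
0xf8d03010, putting the leading halfword in the high bits as the
thumb2 decode routines expect. */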
12770 insn_id = thumb2_record_decode_insn_handler (arm_record);
12772 if (insn_id != ARM_RECORD_SUCCESS)
12774 arm_record_unsupported_insn (arm_record);
12780 /* Throw assertion. */
12781 gdb_assert_not_reached ("not a valid instruction, could not decode");
12788 /* Cleans up local record registers and memory allocations. */
12791 deallocate_reg_mem (insn_decode_record *record)
12793 xfree (record->arm_regs);
12794 xfree (record->arm_mems);
12798 /* Parse the current instruction and record the values of the registers and
12799 memory that will be changed by the current instruction to "record_arch_list".
12800 Return -1 if something is wrong. */
12803 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12804 CORE_ADDR insn_addr)
12807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
12808 uint32_t no_of_rec = 0;
12809 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
12810 ULONGEST t_bit = 0, insn_id = 0;
12812 ULONGEST u_regval = 0;
12814 insn_decode_record arm_record;
12816 memset (&arm_record, 0, sizeof (insn_decode_record));
12817 arm_record.regcache = regcache;
12818 arm_record.this_addr = insn_addr;
12819 arm_record.gdbarch = gdbarch;
12822 if (record_debug > 1)
12824 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12826 paddress (gdbarch, arm_record.this_addr));
12829 if (extract_arm_insn (&arm_record, 2))
12833 printf_unfiltered (_("Process record: error reading memory at "
12834 "addr %s len = %d.\n"),
12835 paddress (arm_record.gdbarch,
12836 arm_record.this_addr), 2);
12841 /* Check whether the insn is a thumb or an arm one. */
12843 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12844 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12847 if (!(u_regval & t_bit))
12849 /* We are decoding arm insn. */
12850 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12854 insn_id = bits (arm_record.arm_insn, 11, 15);
12855 /* Is it a thumb2 insn? */
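/* For example, the leading halfword of "ldr.w r3, [r0, #16]" is 0xf8d0,
whose bits 11-15 are 0x1f, so the thumb2 path below is taken. */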
12856 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12858 ret = decode_insn (&arm_record, THUMB2_RECORD,
12859 THUMB2_INSN_SIZE_BYTES);
12863 /* We are decoding thumb insn. */
12864 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12870 /* Record registers. */
12871 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12872 if (arm_record.arm_regs)
12874 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12876 if (record_full_arch_list_add_reg
12877 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
12881 /* Record memories. */
12882 if (arm_record.arm_mems)
12884 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12886 if (record_full_arch_list_add_mem
12887 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12888 arm_record.arm_mems[no_of_rec].len))
12893 if (record_full_arch_list_add_end ())
12898 deallocate_reg_mem (&arm_record);