1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2022 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2/frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
51 #include "arch/arm-get-next-pcs.h"
53 #include "gdb/sim-arm.h"
56 #include "coff/internal.h"
60 #include "record-full.h"
66 #include "gdbsupport/selftest.h"
69 static bool arm_debug
;
71 /* Print an "arm" debug statement. */
73 #define arm_debug_printf(fmt, ...) \
74 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym)
89 struct arm_mapping_symbol
94 bool operator< (const arm_mapping_symbol
&other
) const
95 { return this->value
< other
.value
; }
98 typedef std::vector
<arm_mapping_symbol
> arm_mapping_symbol_vec
;
102 explicit arm_per_bfd (size_t num_sections
)
103 : section_maps (new arm_mapping_symbol_vec
[num_sections
]),
104 section_maps_sorted (new bool[num_sections
] ())
107 DISABLE_COPY_AND_ASSIGN (arm_per_bfd
);
109 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111 The format is an array of vectors of arm_mapping_symbols, there is one
112 vector for each section of the objfile (the array is index by BFD section
115 For each section, the vector of arm_mapping_symbol is sorted by
116 symbol value (address). */
117 std::unique_ptr
<arm_mapping_symbol_vec
[]> section_maps
;
119 /* For each corresponding element of section_maps above, is this vector
121 std::unique_ptr
<bool[]> section_maps_sorted
;
124 /* Per-bfd data used for mapping symbols. */
125 static bfd_key
<arm_per_bfd
> arm_bfd_data_key
;
127 /* The list of available "set arm ..." and "show arm ..." commands. */
128 static struct cmd_list_element
*setarmcmdlist
= NULL
;
129 static struct cmd_list_element
*showarmcmdlist
= NULL
;
131 /* The type of floating-point to use. Keep this in sync with enum
132 arm_float_model, and the help string in _initialize_arm_tdep. */
133 static const char *const fp_model_strings
[] =
143 /* A variable that can be configured by the user. */
144 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
145 static const char *current_fp_model
= "auto";
147 /* The ABI to use. Keep this in sync with arm_abi_kind. */
148 static const char *const arm_abi_strings
[] =
156 /* A variable that can be configured by the user. */
157 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
158 static const char *arm_abi_string
= "auto";
160 /* The execution mode to assume. */
161 static const char *const arm_mode_strings
[] =
169 static const char *arm_fallback_mode_string
= "auto";
170 static const char *arm_force_mode_string
= "auto";
172 /* The standard register names, and all the valid aliases for them. Note
173 that `fp', `sp' and `pc' are not added in this alias list, because they
174 have been added as builtin user registers in
175 std-regs.c:_initialize_frame_reg. */
180 } arm_register_aliases
[] = {
181 /* Basic register numbers. */
198 /* Synonyms (argument and variable registers). */
211 /* Other platform-specific names for r9. */
217 /* Names used by GCC (not listed in the ARM EABI). */
219 /* A special name from the older ATPCS. */
223 static const char *const arm_register_names
[] =
224 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
225 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
226 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
227 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
228 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
229 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
230 "fps", "cpsr" }; /* 24 25 */
232 /* Holds the current set of options to be passed to the disassembler. */
233 static char *arm_disassembler_options
;
235 /* Valid register name styles. */
236 static const char **valid_disassembly_styles
;
238 /* Disassembly style to use. Default to "std" register names. */
239 static const char *disassembly_style
;
241 /* All possible arm target descriptors. */
242 static struct target_desc
*tdesc_arm_list
[ARM_FP_TYPE_INVALID
];
243 static struct target_desc
*tdesc_arm_mprofile_list
[ARM_M_TYPE_INVALID
];
245 /* This is used to keep the bfd arch_info in sync with the disassembly
247 static void set_disassembly_style_sfunc (const char *, int,
248 struct cmd_list_element
*);
249 static void show_disassembly_style_sfunc (struct ui_file
*, int,
250 struct cmd_list_element
*,
253 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
254 readable_regcache
*regcache
,
255 int regnum
, gdb_byte
*buf
);
256 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
257 struct regcache
*regcache
,
258 int regnum
, const gdb_byte
*buf
);
261 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
);
264 /* get_next_pcs operations. */
265 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops
= {
266 arm_get_next_pcs_read_memory_unsigned_integer
,
267 arm_get_next_pcs_syscall_next_pc
,
268 arm_get_next_pcs_addr_bits_remove
,
269 arm_get_next_pcs_is_thumb
,
273 struct arm_prologue_cache
275 /* The stack pointer at the time this frame was created; i.e. the
276 caller's stack pointer when this function was called. It is used
277 to identify this frame. */
280 /* The frame base for this frame is just prev_sp - frame size.
281 FRAMESIZE is the distance from the frame pointer to the
282 initial stack pointer. */
286 /* The register used to hold the frame pointer for this frame. */
289 /* Saved register offsets. */
290 trad_frame_saved_reg
*saved_regs
;
295 /* Abstract class to read ARM instructions from memory. */
297 class arm_instruction_reader
300 /* Read a 4 bytes instruction from memory using the BYTE_ORDER endianness. */
301 virtual uint32_t read (CORE_ADDR memaddr
, bfd_endian byte_order
) const = 0;
304 /* Read instructions from target memory. */
306 class target_arm_instruction_reader
: public arm_instruction_reader
309 uint32_t read (CORE_ADDR memaddr
, bfd_endian byte_order
) const override
311 return read_code_unsigned_integer (memaddr
, 4, byte_order
);
317 static CORE_ADDR arm_analyze_prologue
318 (struct gdbarch
*gdbarch
, CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
319 struct arm_prologue_cache
*cache
, const arm_instruction_reader
&insn_reader
);
321 /* Architecture version for displaced stepping. This effects the behaviour of
322 certain instructions, and really should not be hard-wired. */
324 #define DISPLACED_STEPPING_ARCH_VERSION 5
326 /* See arm-tdep.h. */
328 bool arm_apcs_32
= true;
330 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
333 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
335 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
343 /* Determine if the processor is currently executing in Thumb mode. */
346 arm_is_thumb (struct regcache
*regcache
)
349 ULONGEST t_bit
= arm_psr_thumb_bit (regcache
->arch ());
351 cpsr
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
353 return (cpsr
& t_bit
) != 0;
356 /* Determine if FRAME is executing in Thumb mode. */
359 arm_frame_is_thumb (struct frame_info
*frame
)
362 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
364 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
365 directly (from a signal frame or dummy frame) or by interpreting
366 the saved LR (from a prologue or DWARF frame). So consult it and
367 trust the unwinders. */
368 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
370 return (cpsr
& t_bit
) != 0;
373 /* Search for the mapping symbol covering MEMADDR. If one is found,
374 return its type. Otherwise, return 0. If START is non-NULL,
375 set *START to the location of the mapping symbol. */
378 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
380 struct obj_section
*sec
;
382 /* If there are mapping symbols, consult them. */
383 sec
= find_pc_section (memaddr
);
386 arm_per_bfd
*data
= arm_bfd_data_key
.get (sec
->objfile
->obfd
);
389 unsigned int section_idx
= sec
->the_bfd_section
->index
;
390 arm_mapping_symbol_vec
&map
391 = data
->section_maps
[section_idx
];
393 /* Sort the vector on first use. */
394 if (!data
->section_maps_sorted
[section_idx
])
396 std::sort (map
.begin (), map
.end ());
397 data
->section_maps_sorted
[section_idx
] = true;
400 arm_mapping_symbol map_key
= { memaddr
- sec
->addr (), 0 };
401 arm_mapping_symbol_vec::const_iterator it
402 = std::lower_bound (map
.begin (), map
.end (), map_key
);
404 /* std::lower_bound finds the earliest ordered insertion
405 point. If the symbol at this position starts at this exact
406 address, we use that; otherwise, the preceding
407 mapping symbol covers this address. */
410 if (it
->value
== map_key
.value
)
413 *start
= it
->value
+ sec
->addr ();
418 if (it
> map
.begin ())
420 arm_mapping_symbol_vec::const_iterator prev_it
424 *start
= prev_it
->value
+ sec
->addr ();
425 return prev_it
->type
;
433 /* Determine if the program counter specified in MEMADDR is in a Thumb
434 function. This function should be called for addresses unrelated to
435 any executing frame; otherwise, prefer arm_frame_is_thumb. */
438 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
440 struct bound_minimal_symbol sym
;
442 arm_displaced_step_copy_insn_closure
*dsc
= nullptr;
443 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
445 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch
))
446 dsc
= ((arm_displaced_step_copy_insn_closure
* )
447 gdbarch_displaced_step_copy_insn_closure_by_addr
448 (gdbarch
, current_inferior (), memaddr
));
450 /* If checking the mode of displaced instruction in copy area, the mode
451 should be determined by instruction on the original address. */
454 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
455 (unsigned long) dsc
->insn_addr
,
456 (unsigned long) memaddr
);
457 memaddr
= dsc
->insn_addr
;
460 /* If bit 0 of the address is set, assume this is a Thumb address. */
461 if (IS_THUMB_ADDR (memaddr
))
464 /* If the user wants to override the symbol table, let him. */
465 if (strcmp (arm_force_mode_string
, "arm") == 0)
467 if (strcmp (arm_force_mode_string
, "thumb") == 0)
470 /* ARM v6-M and v7-M are always in Thumb mode. */
474 /* If there are mapping symbols, consult them. */
475 type
= arm_find_mapping_symbol (memaddr
, NULL
);
479 /* Thumb functions have a "special" bit set in minimal symbols. */
480 sym
= lookup_minimal_symbol_by_pc (memaddr
);
482 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
484 /* If the user wants to override the fallback mode, let them. */
485 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
487 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
490 /* If we couldn't find any symbol, but we're talking to a running
491 target, then trust the current value of $cpsr. This lets
492 "display/i $pc" always show the correct mode (though if there is
493 a symbol table we will not reach here, so it still may not be
494 displayed in the mode it will be executed). */
495 if (target_has_registers ())
496 return arm_frame_is_thumb (get_current_frame ());
498 /* Otherwise we're out of luck; we assume ARM. */
502 /* Determine if the address specified equals any of these magic return
503 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
506 From ARMv6-M Reference Manual B1.5.8
507 Table B1-5 Exception return behavior
509 EXC_RETURN Return To Return Stack
510 0xFFFFFFF1 Handler mode Main
511 0xFFFFFFF9 Thread mode Main
512 0xFFFFFFFD Thread mode Process
514 From ARMv7-M Reference Manual B1.5.8
515 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
517 EXC_RETURN Return To Return Stack
518 0xFFFFFFF1 Handler mode Main
519 0xFFFFFFF9 Thread mode Main
520 0xFFFFFFFD Thread mode Process
522 Table B1-9 EXC_RETURN definition of exception return behavior, with
525 EXC_RETURN Return To Return Stack Frame Type
526 0xFFFFFFE1 Handler mode Main Extended
527 0xFFFFFFE9 Thread mode Main Extended
528 0xFFFFFFED Thread mode Process Extended
529 0xFFFFFFF1 Handler mode Main Basic
530 0xFFFFFFF9 Thread mode Main Basic
531 0xFFFFFFFD Thread mode Process Basic
533 For more details see "B1.5.8 Exception return behavior"
534 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
536 In the ARMv8-M Architecture Technical Reference also adds
537 for implementations without the Security Extension:
540 0xFFFFFFB0 Return to Handler mode.
541 0xFFFFFFB8 Return to Thread mode using the main stack.
542 0xFFFFFFBC Return to Thread mode using the process stack. */
545 arm_m_addr_is_magic (CORE_ADDR addr
)
549 /* Values from ARMv8-M Architecture Technical Reference. */
553 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
554 the exception return behavior. */
561 /* Address is magic. */
565 /* Address is not magic. */
570 /* Remove useless bits from addresses in a running program. */
572 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
574 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
576 /* On M-profile devices, do not strip the low bit from EXC_RETURN
577 (the magic exception return address). */
578 if (tdep
->is_m
&& arm_m_addr_is_magic (val
))
582 return UNMAKE_THUMB_ADDR (val
);
584 return (val
& 0x03fffffc);
587 /* Return 1 if PC is the start of a compiler helper function which
588 can be safely ignored during prologue skipping. IS_THUMB is true
589 if the function is known to be a Thumb function due to the way it
592 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
594 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
595 struct bound_minimal_symbol msym
;
597 msym
= lookup_minimal_symbol_by_pc (pc
);
598 if (msym
.minsym
!= NULL
599 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
600 && msym
.minsym
->linkage_name () != NULL
)
602 const char *name
= msym
.minsym
->linkage_name ();
604 /* The GNU linker's Thumb call stub to foo is named
606 if (strstr (name
, "_from_thumb") != NULL
)
609 /* On soft-float targets, __truncdfsf2 is called to convert promoted
610 arguments to their argument types in non-prototyped
612 if (startswith (name
, "__truncdfsf2"))
614 if (startswith (name
, "__aeabi_d2f"))
617 /* Internal functions related to thread-local storage. */
618 if (startswith (name
, "__tls_get_addr"))
620 if (startswith (name
, "__aeabi_read_tp"))
625 /* If we run against a stripped glibc, we may be unable to identify
626 special functions by name. Check for one important case,
627 __aeabi_read_tp, by comparing the *code* against the default
628 implementation (this is hand-written ARM assembler in glibc). */
631 && read_code_unsigned_integer (pc
, 4, byte_order_for_code
)
632 == 0xe3e00a0f /* mov r0, #0xffff0fff */
633 && read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
634 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The immediate is assembled as imm4:i:imm3:imm8.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2)	\
  ((bits ((insn1), 0, 3) << 12)			\
   | (bits ((insn1), 10, 10) << 11)		\
   | (bits ((insn2), 12, 14) << 8)		\
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The immediate is assembled as imm4:imm12.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn)		\
  ((bits ((insn), 16, 19) << 12)		\
   | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op
   (ARM Architecture Reference Manual, "modified immediate constants").
   IMM is the 12-bit encoded immediate i:imm3:imm8; return the
   expanded 32-bit value.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  /* The top five bits select the encoding: values below 8 are the
     repeated-byte forms; 8 and up rotate an 8-bit value.  */
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh  */
	return imm & 0xff;
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* An 8-bit value (high bit forced on) rotated right by COUNT.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		      /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	      /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);      /* pop <registers> */
}
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint. */
698 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
699 CORE_ADDR start
, CORE_ADDR limit
,
700 struct arm_prologue_cache
*cache
)
702 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
703 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
707 CORE_ADDR unrecognized_pc
= 0;
709 for (i
= 0; i
< 16; i
++)
710 regs
[i
] = pv_register (i
, 0);
711 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
713 while (start
< limit
)
717 insn
= read_code_unsigned_integer (start
, 2, byte_order_for_code
);
719 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
724 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
733 if (mask
& (1 << regno
))
735 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
737 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
740 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
742 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
743 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
746 else if (thumb_instruction_restores_sp (insn
))
748 /* Don't scan past the epilogue. */
751 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
754 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
756 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
758 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
760 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
762 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
764 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
765 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
766 regs
[bits (insn
, 6, 8)]);
767 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
770 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
771 int rm
= bits (insn
, 3, 6);
772 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
774 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
776 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
777 int src_reg
= (insn
& 0x78) >> 3;
778 regs
[dst_reg
] = regs
[src_reg
];
780 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno
= (insn
>> 8) & 0x7;
788 offset
= (insn
& 0xff) << 2;
789 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
791 if (stack
.store_would_trash (addr
))
794 stack
.store (addr
, 4, regs
[regno
]);
796 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
798 int rd
= bits (insn
, 0, 2);
799 int rn
= bits (insn
, 3, 5);
802 offset
= bits (insn
, 6, 10) << 2;
803 addr
= pv_add_constant (regs
[rn
], offset
);
805 if (stack
.store_would_trash (addr
))
808 stack
.store (addr
, 4, regs
[rd
]);
810 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
813 /* Ignore stores of argument registers to the stack. */
815 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
820 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
823 /* Similarly ignore single loads from the stack. */
825 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
830 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
833 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
834 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant
;
840 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
841 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
842 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
844 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
846 unsigned short inst2
;
848 inst2
= read_code_unsigned_integer (start
+ 2, 2,
849 byte_order_for_code
);
851 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
857 int j1
, j2
, imm1
, imm2
;
859 imm1
= sbits (insn
, 0, 10);
860 imm2
= bits (inst2
, 0, 10);
861 j1
= bit (inst2
, 13);
862 j2
= bit (inst2
, 11);
864 offset
= ((imm1
<< 12) + (imm2
<< 1));
865 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
867 nextpc
= start
+ 4 + offset
;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2
, 12) == 0)
870 nextpc
= nextpc
& 0xfffffffc;
872 if (!skip_prologue_function (gdbarch
, nextpc
,
873 bit (inst2
, 12) != 0))
877 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
879 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
881 pv_t addr
= regs
[bits (insn
, 0, 3)];
884 if (stack
.store_would_trash (addr
))
887 /* Calculate offsets of saved registers. */
888 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
889 if (inst2
& (1 << regno
))
891 addr
= pv_add_constant (addr
, -4);
892 stack
.store (addr
, 4, regs
[regno
]);
896 regs
[bits (insn
, 0, 3)] = addr
;
899 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
901 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
903 int regno1
= bits (inst2
, 12, 15);
904 int regno2
= bits (inst2
, 8, 11);
905 pv_t addr
= regs
[bits (insn
, 0, 3)];
907 offset
= inst2
& 0xff;
909 addr
= pv_add_constant (addr
, offset
);
911 addr
= pv_add_constant (addr
, -offset
);
913 if (stack
.store_would_trash (addr
))
916 stack
.store (addr
, 4, regs
[regno1
]);
917 stack
.store (pv_add_constant (addr
, 4),
921 regs
[bits (insn
, 0, 3)] = addr
;
924 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2
& 0x0c00) == 0x0c00
926 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
928 int regno
= bits (inst2
, 12, 15);
929 pv_t addr
= regs
[bits (insn
, 0, 3)];
931 offset
= inst2
& 0xff;
933 addr
= pv_add_constant (addr
, offset
);
935 addr
= pv_add_constant (addr
, -offset
);
937 if (stack
.store_would_trash (addr
))
940 stack
.store (addr
, 4, regs
[regno
]);
943 regs
[bits (insn
, 0, 3)] = addr
;
946 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
949 int regno
= bits (inst2
, 12, 15);
952 offset
= inst2
& 0xfff;
953 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
955 if (stack
.store_would_trash (addr
))
958 stack
.store (addr
, 4, regs
[regno
]);
961 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
963 /* Ignore stores of argument registers to the stack. */
966 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2
& 0x0d00) == 0x0c00
968 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
969 /* Ignore stores of argument registers to the stack. */
972 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
974 && (inst2
& 0x8000) == 0x0000
975 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
980 else if ((insn
& 0xff70) == 0xe950 /* ldrd Rt, Rt2,
982 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
983 /* Similarly ignore dual loads from the stack. */
986 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2
& 0x0d00) == 0x0c00
988 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
989 /* Similarly ignore single loads from the stack. */
992 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
994 /* Similarly ignore single loads from the stack. */
997 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2
& 0x8000) == 0x0000)
1000 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1001 | (bits (inst2
, 12, 14) << 8)
1002 | bits (inst2
, 0, 7));
1004 regs
[bits (inst2
, 8, 11)]
1005 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1006 thumb_expand_immediate (imm
));
1009 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2
& 0x8000) == 0x0000)
1012 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1013 | (bits (inst2
, 12, 14) << 8)
1014 | bits (inst2
, 0, 7));
1016 regs
[bits (inst2
, 8, 11)]
1017 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1020 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2
& 0x8000) == 0x0000)
1023 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1024 | (bits (inst2
, 12, 14) << 8)
1025 | bits (inst2
, 0, 7));
1027 regs
[bits (inst2
, 8, 11)]
1028 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1029 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1032 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2
& 0x8000) == 0x0000)
1035 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1036 | (bits (inst2
, 12, 14) << 8)
1037 | bits (inst2
, 0, 7));
1039 regs
[bits (inst2
, 8, 11)]
1040 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1043 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1045 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1046 | (bits (inst2
, 12, 14) << 8)
1047 | bits (inst2
, 0, 7));
1049 regs
[bits (inst2
, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm
));
1053 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1058 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1061 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1062 && (inst2
& 0xf0f0) == 0)
1064 int dst_reg
= (inst2
& 0x0f00) >> 8;
1065 int src_reg
= inst2
& 0xf;
1066 regs
[dst_reg
] = regs
[src_reg
];
1069 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1071 /* Constant pool loads. */
1072 unsigned int constant
;
1075 offset
= bits (inst2
, 0, 11);
1077 loc
= start
+ 4 + offset
;
1079 loc
= start
+ 4 - offset
;
1081 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1082 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1085 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1087 /* Constant pool loads. */
1088 unsigned int constant
;
1091 offset
= bits (inst2
, 0, 7) << 2;
1093 loc
= start
+ 4 + offset
;
1095 loc
= start
+ 4 - offset
;
1097 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1098 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1100 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1101 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1104 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1106 /* Don't scan past anything that might change control flow. */
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc
= start
;
1118 else if (thumb_instruction_changes_pc (insn
))
1120 /* Don't scan past anything that might change control flow. */
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc
= start
;
1133 arm_debug_printf ("Prologue scan stopped at %s",
1134 paddress (gdbarch
, start
));
1136 if (unrecognized_pc
== 0)
1137 unrecognized_pc
= start
;
1140 return unrecognized_pc
;
1142 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1144 /* Frame pointer is fp. Frame size is constant. */
1145 cache
->framereg
= ARM_FP_REGNUM
;
1146 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1148 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1150 /* Frame pointer is r7. Frame size is constant. */
1151 cache
->framereg
= THUMB_FP_REGNUM
;
1152 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1156 /* Try the stack pointer... this is a bit desperate. */
1157 cache
->framereg
= ARM_SP_REGNUM
;
1158 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1161 for (i
= 0; i
< 16; i
++)
1162 if (stack
.find_reg (gdbarch
, i
, &offset
))
1163 cache
->saved_regs
[i
].set_addr (offset
);
1165 return unrecognized_pc
;
1169 /* Try to analyze the instructions starting from PC, which load symbol
1170 __stack_chk_guard. Return the address of instruction after loading this
1171 symbol, set the dest register number to *BASEREG, and set the size of
1172 instructions for loading symbol in OFFSET. Return 0 if instructions are
1176 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1177 unsigned int *destreg
, int *offset
)
1179 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1180 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1181 unsigned int low
, high
, address
;
1186 unsigned short insn1
1187 = read_code_unsigned_integer (pc
, 2, byte_order_for_code
);
1189 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1191 *destreg
= bits (insn1
, 8, 10);
1193 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1194 address
= read_memory_unsigned_integer (address
, 4,
1195 byte_order_for_code
);
1197 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1199 unsigned short insn2
1200 = read_code_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1202 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1205 = read_code_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1207 = read_code_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1209 /* movt Rd, #const */
1210 if ((insn1
& 0xfbc0) == 0xf2c0)
1212 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1213 *destreg
= bits (insn2
, 8, 11);
1215 address
= (high
<< 16 | low
);
1222 = read_code_unsigned_integer (pc
, 4, byte_order_for_code
);
1224 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1226 address
= bits (insn
, 0, 11) + pc
+ 8;
1227 address
= read_memory_unsigned_integer (address
, 4,
1228 byte_order_for_code
);
1230 *destreg
= bits (insn
, 12, 15);
1233 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1235 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1238 = read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1240 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1242 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1243 *destreg
= bits (insn
, 12, 15);
1245 address
= (high
<< 16 | low
);
1253 /* Try to skip a sequence of instructions used for stack protector. If PC
1254 points to the first instruction of this sequence, return the address of
1255 first instruction after this sequence, otherwise, return original PC.
1257 On arm, this sequence of instructions is composed of mainly three steps,
1258 Step 1: load symbol __stack_chk_guard,
1259 Step 2: load from address of __stack_chk_guard,
1260 Step 3: store it to somewhere else.
1262 Usually, instructions on step 2 and step 3 are the same on various ARM
1263 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1264 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1265 instructions in step 1 vary from different ARM architectures. On ARMv7,
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1276 .word __stack_chk_guard
1278 Since ldr/str is a very popular instruction, we can't use them as
1279 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1280 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1281 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1284 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1286 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1287 unsigned int basereg
;
1288 struct bound_minimal_symbol stack_chk_guard
;
1290 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1293 /* Try to parse the instructions in Step 1. */
1294 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1299 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1300 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1301 Otherwise, this sequence cannot be for stack protector. */
1302 if (stack_chk_guard
.minsym
== NULL
1303 || !startswith (stack_chk_guard
.minsym
->linkage_name (), "__stack_chk_guard"))
1308 unsigned int destreg
;
1310 = read_code_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1312 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1313 if ((insn
& 0xf800) != 0x6800)
1315 if (bits (insn
, 3, 5) != basereg
)
1317 destreg
= bits (insn
, 0, 2);
1319 insn
= read_code_unsigned_integer (pc
+ offset
+ 2, 2,
1320 byte_order_for_code
);
1321 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1322 if ((insn
& 0xf800) != 0x6000)
1324 if (destreg
!= bits (insn
, 0, 2))
1329 unsigned int destreg
;
1331 = read_code_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1333 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1334 if ((insn
& 0x0e500000) != 0x04100000)
1336 if (bits (insn
, 16, 19) != basereg
)
1338 destreg
= bits (insn
, 12, 15);
1339 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1340 insn
= read_code_unsigned_integer (pc
+ offset
+ 4,
1341 4, byte_order_for_code
);
1342 if ((insn
& 0x0e500000) != 0x04000000)
1344 if (bits (insn
, 12, 15) != destreg
)
1347 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1350 return pc
+ offset
+ 4;
1352 return pc
+ offset
+ 8;
1355 /* Advance the PC across any function entry prologue instructions to
1356 reach some "real" code.
1358 The APCS (ARM Procedure Call Standard) defines the following
1362 [stmfd sp!, {a1,a2,a3,a4}]
1363 stmfd sp!, {...,fp,ip,lr,pc}
1364 [stfe f7, [sp, #-12]!]
1365 [stfe f6, [sp, #-12]!]
1366 [stfe f5, [sp, #-12]!]
1367 [stfe f4, [sp, #-12]!]
1368 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1371 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1373 CORE_ADDR func_addr
, limit_pc
;
1375 /* See if we can determine the end of the prologue via the symbol table.
1376 If so, then return either PC, or the PC after the prologue, whichever
1378 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1380 CORE_ADDR post_prologue_pc
1381 = skip_prologue_using_sal (gdbarch
, func_addr
);
1382 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1384 if (post_prologue_pc
)
1386 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1389 /* GCC always emits a line note before the prologue and another
1390 one after, even if the two are at the same address or on the
1391 same line. Take advantage of this so that we do not need to
1392 know every instruction that might appear in the prologue. We
1393 will have producer information for most binaries; if it is
1394 missing (e.g. for -gstabs), assuming the GNU tools. */
1395 if (post_prologue_pc
1397 || COMPUNIT_PRODUCER (cust
) == NULL
1398 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1399 || producer_is_llvm (COMPUNIT_PRODUCER (cust
))))
1400 return post_prologue_pc
;
1402 if (post_prologue_pc
!= 0)
1404 CORE_ADDR analyzed_limit
;
1406 /* For non-GCC compilers, make sure the entire line is an
1407 acceptable prologue; GDB will round this function's
1408 return value up to the end of the following line so we
1409 can not skip just part of a line (and we do not want to).
1411 RealView does not treat the prologue specially, but does
1412 associate prologue code with the opening brace; so this
1413 lets us skip the first line if we think it is the opening
1415 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1416 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1417 post_prologue_pc
, NULL
);
1420 = arm_analyze_prologue (gdbarch
, func_addr
, post_prologue_pc
,
1421 NULL
, target_arm_instruction_reader ());
1423 if (analyzed_limit
!= post_prologue_pc
)
1426 return post_prologue_pc
;
1430 /* Can't determine prologue from the symbol table, need to examine
1433 /* Find an upper limit on the function prologue using the debug
1434 information. If the debug information could not be used to provide
1435 that bound, then use an arbitrary large number as the upper bound. */
1436 /* Like arm_scan_prologue, stop no later than pc + 64. */
1437 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1439 limit_pc
= pc
+ 64; /* Magic. */
1442 /* Check if this is Thumb code. */
1443 if (arm_pc_is_thumb (gdbarch
, pc
))
1444 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1446 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
,
1447 target_arm_instruction_reader ());
1451 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1452 This function decodes a Thumb function prologue to determine:
1453 1) the size of the stack frame
1454 2) which registers are saved on it
1455 3) the offsets of saved regs
1456 4) the offset from the stack pointer to the frame pointer
1458 A typical Thumb function prologue would create this stack frame
1459 (offsets relative to FP)
1460 old SP -> 24 stack parameters
1463 R7 -> 0 local variables (16 bytes)
1464 SP -> -12 additional stack space (12 bytes)
1465 The frame size would thus be 36 bytes, and the frame offset would be
1466 12 bytes. The frame register is R7.
1468 The comments for thumb_skip_prolog() describe the algorithm we use
1469 to detect the end of the prolog. */
1473 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1474 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1476 CORE_ADDR prologue_start
;
1477 CORE_ADDR prologue_end
;
1479 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1482 /* See comment in arm_scan_prologue for an explanation of
1484 if (prologue_end
> prologue_start
+ 64)
1486 prologue_end
= prologue_start
+ 64;
1490 /* We're in the boondocks: we have no idea where the start of the
1494 prologue_end
= std::min (prologue_end
, prev_pc
);
1496 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1499 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1503 arm_instruction_restores_sp (unsigned int insn
)
1505 if (bits (insn
, 28, 31) != INST_NV
)
1507 if ((insn
& 0x0df0f000) == 0x0080d000
1508 /* ADD SP (register or immediate). */
1509 || (insn
& 0x0df0f000) == 0x0040d000
1510 /* SUB SP (register or immediate). */
1511 || (insn
& 0x0ffffff0) == 0x01a0d000
1513 || (insn
& 0x0fff0000) == 0x08bd0000
1515 || (insn
& 0x0fff0000) == 0x049d0000)
1516 /* POP of a single register. */
1523 /* Implement immediate value decoding, as described in section A5.2.4
1524 (Modified immediate constants in ARM instructions) of the ARM Architecture
1525 Reference Manual (ARMv7-A and ARMv7-R edition). */
1528 arm_expand_immediate (uint32_t imm
)
1530 /* Immediate values are 12 bits long. */
1531 gdb_assert ((imm
& 0xfffff000) == 0);
1533 uint32_t unrotated_value
= imm
& 0xff;
1534 uint32_t rotate_amount
= (imm
& 0xf00) >> 7;
1536 if (rotate_amount
== 0)
1537 return unrotated_value
;
1539 return ((unrotated_value
>> rotate_amount
)
1540 | (unrotated_value
<< (32 - rotate_amount
)));
1543 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1544 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1545 fill it in. Return the first address not recognized as a prologue
1548 We recognize all the instructions typically found in ARM prologues,
1549 plus harmless instructions which can be skipped (either for analysis
1550 purposes, or a more restrictive set that can be skipped when finding
1551 the end of the prologue). */
1554 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1555 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1556 struct arm_prologue_cache
*cache
,
1557 const arm_instruction_reader
&insn_reader
)
1559 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1561 CORE_ADDR offset
, current_pc
;
1562 pv_t regs
[ARM_FPS_REGNUM
];
1563 CORE_ADDR unrecognized_pc
= 0;
1564 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
1566 /* Search the prologue looking for instructions that set up the
1567 frame pointer, adjust the stack pointer, and save registers.
1569 Be careful, however, and if it doesn't look like a prologue,
1570 don't try to scan it. If, for instance, a frameless function
1571 begins with stmfd sp!, then we will tell ourselves there is
1572 a frame, which will confuse stack traceback, as well as "finish"
1573 and other operations that rely on a knowledge of the stack
1576 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1577 regs
[regno
] = pv_register (regno
, 0);
1578 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1580 for (current_pc
= prologue_start
;
1581 current_pc
< prologue_end
;
1584 uint32_t insn
= insn_reader
.read (current_pc
, byte_order_for_code
);
1586 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1588 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1591 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1592 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1594 uint32_t imm
= arm_expand_immediate (insn
& 0xfff);
1595 int rd
= bits (insn
, 12, 15);
1596 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1599 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1600 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1602 uint32_t imm
= arm_expand_immediate (insn
& 0xfff);
1603 int rd
= bits (insn
, 12, 15);
1604 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1607 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1610 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1612 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1613 stack
.store (regs
[ARM_SP_REGNUM
], 4,
1614 regs
[bits (insn
, 12, 15)]);
1617 else if ((insn
& 0xffff0000) == 0xe92d0000)
1618 /* stmfd sp!, {..., fp, ip, lr, pc}
1620 stmfd sp!, {a1, a2, a3, a4} */
1622 int mask
= insn
& 0xffff;
1624 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1627 /* Calculate offsets of saved registers. */
1628 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1629 if (mask
& (1 << regno
))
1632 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1633 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1636 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1637 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1638 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1640 /* No need to add this to saved_regs -- it's just an arg reg. */
1643 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1644 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1645 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1647 /* No need to add this to saved_regs -- it's just an arg reg. */
1650 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1652 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1654 /* No need to add this to saved_regs -- it's just arg regs. */
1657 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1659 uint32_t imm
= arm_expand_immediate (insn
& 0xfff);
1660 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1662 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1664 uint32_t imm
= arm_expand_immediate(insn
& 0xfff);
1665 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1667 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1669 && tdep
->have_fpa_registers
)
1671 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1674 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1675 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1676 stack
.store (regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1678 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1680 && tdep
->have_fpa_registers
)
1682 int n_saved_fp_regs
;
1683 unsigned int fp_start_reg
, fp_bound_reg
;
1685 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1688 if ((insn
& 0x800) == 0x800) /* N0 is set */
1690 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1691 n_saved_fp_regs
= 3;
1693 n_saved_fp_regs
= 1;
1697 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1698 n_saved_fp_regs
= 2;
1700 n_saved_fp_regs
= 4;
1703 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1704 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1705 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1707 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1708 stack
.store (regs
[ARM_SP_REGNUM
], 12,
1709 regs
[fp_start_reg
++]);
1712 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1714 /* Allow some special function calls when skipping the
1715 prologue; GCC generates these before storing arguments to
1717 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1719 if (skip_prologue_function (gdbarch
, dest
, 0))
1724 else if ((insn
& 0xf0000000) != 0xe0000000)
1725 break; /* Condition not true, exit early. */
1726 else if (arm_instruction_changes_pc (insn
))
1727 /* Don't scan past anything that might change control flow. */
1729 else if (arm_instruction_restores_sp (insn
))
1731 /* Don't scan past the epilogue. */
1734 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1735 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1736 /* Ignore block loads from the stack, potentially copying
1737 parameters from memory. */
1739 else if ((insn
& 0xfc500000) == 0xe4100000
1740 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1741 /* Similarly ignore single loads from the stack. */
1743 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1744 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1745 register instead of the stack. */
1749 /* The optimizer might shove anything into the prologue, if
1750 we build up cache (cache != NULL) from scanning prologue,
1751 we just skip what we don't recognize and scan further to
1752 make cache as complete as possible. However, if we skip
1753 prologue, we'll stop immediately on unrecognized
1755 unrecognized_pc
= current_pc
;
1763 if (unrecognized_pc
== 0)
1764 unrecognized_pc
= current_pc
;
1768 int framereg
, framesize
;
1770 /* The frame size is just the distance from the frame register
1771 to the original stack pointer. */
1772 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1774 /* Frame pointer is fp. */
1775 framereg
= ARM_FP_REGNUM
;
1776 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1780 /* Try the stack pointer... this is a bit desperate. */
1781 framereg
= ARM_SP_REGNUM
;
1782 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1785 cache
->framereg
= framereg
;
1786 cache
->framesize
= framesize
;
1788 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1789 if (stack
.find_reg (gdbarch
, regno
, &offset
))
1790 cache
->saved_regs
[regno
].set_addr (offset
);
1793 arm_debug_printf ("Prologue scan stopped at %s",
1794 paddress (gdbarch
, unrecognized_pc
));
1796 return unrecognized_pc
;
1800 arm_scan_prologue (struct frame_info
*this_frame
,
1801 struct arm_prologue_cache
*cache
)
1803 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1804 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1805 CORE_ADDR prologue_start
, prologue_end
;
1806 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1807 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1808 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
1810 /* Assume there is no frame until proven otherwise. */
1811 cache
->framereg
= ARM_SP_REGNUM
;
1812 cache
->framesize
= 0;
1814 /* Check for Thumb prologue. */
1815 if (arm_frame_is_thumb (this_frame
))
1817 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1821 /* Find the function prologue. If we can't find the function in
1822 the symbol table, peek in the stack frame to find the PC. */
1823 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1826 /* One way to find the end of the prologue (which works well
1827 for unoptimized code) is to do the following:
1829 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1832 prologue_end = prev_pc;
1833 else if (sal.end < prologue_end)
1834 prologue_end = sal.end;
1836 This mechanism is very accurate so long as the optimizer
1837 doesn't move any instructions from the function body into the
1838 prologue. If this happens, sal.end will be the last
1839 instruction in the first hunk of prologue code just before
1840 the first instruction that the scheduler has moved from
1841 the body to the prologue.
1843 In order to make sure that we scan all of the prologue
1844 instructions, we use a slightly less accurate mechanism which
1845 may scan more than necessary. To help compensate for this
1846 lack of accuracy, the prologue scanning loop below contains
1847 several clauses which'll cause the loop to terminate early if
1848 an implausible prologue instruction is encountered.
1854 is a suitable endpoint since it accounts for the largest
1855 possible prologue plus up to five instructions inserted by
1858 if (prologue_end
> prologue_start
+ 64)
1860 prologue_end
= prologue_start
+ 64; /* See above. */
1865 /* We have no symbol information. Our only option is to assume this
1866 function has a standard stack frame and the normal frame register.
1867 Then, we can find the value of our frame pointer on entrance to
1868 the callee (or at the present moment if this is the innermost frame).
1869 The value stored there should be the address of the stmfd + 8. */
1870 CORE_ADDR frame_loc
;
1871 ULONGEST return_value
;
1873 /* AAPCS does not use a frame register, so we can abort here. */
1874 if (tdep
->arm_abi
== ARM_ABI_AAPCS
)
1877 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1878 if (!safe_read_memory_unsigned_integer (frame_loc
, 4, byte_order
,
1883 prologue_start
= gdbarch_addr_bits_remove
1884 (gdbarch
, return_value
) - 8;
1885 prologue_end
= prologue_start
+ 64; /* See above. */
1889 if (prev_pc
< prologue_end
)
1890 prologue_end
= prev_pc
;
1892 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
,
1893 target_arm_instruction_reader ());
1896 static struct arm_prologue_cache
*
1897 arm_make_prologue_cache (struct frame_info
*this_frame
)
1900 struct arm_prologue_cache
*cache
;
1901 CORE_ADDR unwound_fp
;
1903 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
1904 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
1906 arm_scan_prologue (this_frame
, cache
);
1908 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
1909 if (unwound_fp
== 0)
1912 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
1914 /* Calculate actual addresses of saved registers using offsets
1915 determined by arm_scan_prologue. */
1916 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
1917 if (cache
->saved_regs
[reg
].is_addr ())
1918 cache
->saved_regs
[reg
].set_addr (cache
->saved_regs
[reg
].addr ()
1924 /* Implementation of the stop_reason hook for arm_prologue frames. */
1926 static enum unwind_stop_reason
1927 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
1930 struct arm_prologue_cache
*cache
;
1933 if (*this_cache
== NULL
)
1934 *this_cache
= arm_make_prologue_cache (this_frame
);
1935 cache
= (struct arm_prologue_cache
*) *this_cache
;
1937 /* This is meant to halt the backtrace at "_start". */
1938 pc
= get_frame_pc (this_frame
);
1939 gdbarch
*arch
= get_frame_arch (this_frame
);
1940 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (arch
);
1941 if (pc
<= tdep
->lowest_pc
)
1942 return UNWIND_OUTERMOST
;
1944 /* If we've hit a wall, stop. */
1945 if (cache
->prev_sp
== 0)
1946 return UNWIND_OUTERMOST
;
1948 return UNWIND_NO_REASON
;
1951 /* Our frame ID for a normal frame is the current function's starting PC
1952 and the caller's SP when we were called. */
1955 arm_prologue_this_id (struct frame_info
*this_frame
,
1957 struct frame_id
*this_id
)
1959 struct arm_prologue_cache
*cache
;
1963 if (*this_cache
== NULL
)
1964 *this_cache
= arm_make_prologue_cache (this_frame
);
1965 cache
= (struct arm_prologue_cache
*) *this_cache
;
1967 /* Use function start address as part of the frame ID. If we cannot
1968 identify the start address (due to missing symbol information),
1969 fall back to just using the current PC. */
1970 pc
= get_frame_pc (this_frame
);
1971 func
= get_frame_func (this_frame
);
1975 id
= frame_id_build (cache
->prev_sp
, func
);
1979 static struct value
*
1980 arm_prologue_prev_register (struct frame_info
*this_frame
,
1984 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1985 struct arm_prologue_cache
*cache
;
1987 if (*this_cache
== NULL
)
1988 *this_cache
= arm_make_prologue_cache (this_frame
);
1989 cache
= (struct arm_prologue_cache
*) *this_cache
;
1991 /* If we are asked to unwind the PC, then we need to return the LR
1992 instead. The prologue may save PC, but it will point into this
1993 frame's prologue, not the next frame's resume location. Also
1994 strip the saved T bit. A valid LR may have the low bit set, but
1995 a valid PC never does. */
1996 if (prev_regnum
== ARM_PC_REGNUM
)
2000 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2001 return frame_unwind_got_constant (this_frame
, prev_regnum
,
2002 arm_addr_bits_remove (gdbarch
, lr
));
2005 /* SP is generally not saved to the stack, but this frame is
2006 identified by the next frame's stack pointer at the time of the call.
2007 The value was already reconstructed into PREV_SP. */
2008 if (prev_regnum
== ARM_SP_REGNUM
)
2009 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
2011 /* The CPSR may have been changed by the call instruction and by the
2012 called function. The only bit we can reconstruct is the T bit,
2013 by checking the low bit of LR as of the call. This is a reliable
2014 indicator of Thumb-ness except for some ARM v4T pre-interworking
2015 Thumb code, which could get away with a clear low bit as long as
2016 the called function did not use bx. Guess that all other
2017 bits are unchanged; the condition flags are presumably lost,
2018 but the processor status is likely valid. */
2019 if (prev_regnum
== ARM_PS_REGNUM
)
2022 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
2024 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
2025 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2026 if (IS_THUMB_ADDR (lr
))
2030 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
2033 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
2037 static frame_unwind arm_prologue_unwind
= {
2040 arm_prologue_unwind_stop_reason
,
2041 arm_prologue_this_id
,
2042 arm_prologue_prev_register
,
2044 default_frame_sniffer
2047 /* Maintain a list of ARM exception table entries per objfile, similar to the
2048 list of mapping symbols. We only cache entries for standard ARM-defined
2049 personality routines; the cache will contain only the frame unwinding
2050 instructions associated with the entry (not the descriptors). */
2052 struct arm_exidx_entry
2057 bool operator< (const arm_exidx_entry
&other
) const
2059 return addr
< other
.addr
;
2063 struct arm_exidx_data
2065 std::vector
<std::vector
<arm_exidx_entry
>> section_maps
;
2068 /* Per-BFD key to store exception handling information. */
2069 static const struct bfd_key
<arm_exidx_data
> arm_exidx_data_key
;
2071 static struct obj_section
*
2072 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2074 struct obj_section
*osect
;
2076 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2077 if (bfd_section_flags (osect
->the_bfd_section
) & SEC_ALLOC
)
2079 bfd_vma start
, size
;
2080 start
= bfd_section_vma (osect
->the_bfd_section
);
2081 size
= bfd_section_size (osect
->the_bfd_section
);
2083 if (start
<= vma
&& vma
< start
+ size
)
2090 /* Parse contents of exception table and exception index sections
2091 of OBJFILE, and fill in the exception table entry cache.
2093 For each entry that refers to a standard ARM-defined personality
2094 routine, extract the frame unwinding instructions (from either
2095 the index or the table section). The unwinding instructions
2097 - extracting them from the rest of the table data
2098 - converting to host endianness
2099 - appending the implicit 0xb0 ("Finish") code
2101 The extracted and normalized instructions are stored for later
2102 retrieval by the arm_find_exidx_entry routine. */
2105 arm_exidx_new_objfile (struct objfile
*objfile
)
2107 struct arm_exidx_data
*data
;
2108 asection
*exidx
, *extab
;
2109 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2112 /* If we've already touched this file, do nothing. */
2113 if (!objfile
|| arm_exidx_data_key
.get (objfile
->obfd
) != NULL
)
2116 /* Read contents of exception table and index. */
2117 exidx
= bfd_get_section_by_name (objfile
->obfd
, ELF_STRING_ARM_unwind
);
2118 gdb::byte_vector exidx_data
;
2121 exidx_vma
= bfd_section_vma (exidx
);
2122 exidx_data
.resize (bfd_section_size (exidx
));
2124 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2125 exidx_data
.data (), 0,
2126 exidx_data
.size ()))
2130 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2131 gdb::byte_vector extab_data
;
2134 extab_vma
= bfd_section_vma (extab
);
2135 extab_data
.resize (bfd_section_size (extab
));
2137 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2138 extab_data
.data (), 0,
2139 extab_data
.size ()))
2143 /* Allocate exception table data structure. */
2144 data
= arm_exidx_data_key
.emplace (objfile
->obfd
);
2145 data
->section_maps
.resize (objfile
->obfd
->section_count
);
2147 /* Fill in exception table. */
2148 for (i
= 0; i
< exidx_data
.size () / 8; i
++)
2150 struct arm_exidx_entry new_exidx_entry
;
2151 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
.data () + i
* 8);
2152 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
,
2153 exidx_data
.data () + i
* 8 + 4);
2154 bfd_vma addr
= 0, word
= 0;
2155 int n_bytes
= 0, n_words
= 0;
2156 struct obj_section
*sec
;
2157 gdb_byte
*entry
= NULL
;
2159 /* Extract address of start of function. */
2160 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2161 idx
+= exidx_vma
+ i
* 8;
2163 /* Find section containing function and compute section offset. */
2164 sec
= arm_obj_section_from_vma (objfile
, idx
);
2167 idx
-= bfd_section_vma (sec
->the_bfd_section
);
2169 /* Determine address of exception table entry. */
2172 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2174 else if ((val
& 0xff000000) == 0x80000000)
2176 /* Exception table entry embedded in .ARM.exidx
2177 -- must be short form. */
2181 else if (!(val
& 0x80000000))
2183 /* Exception table entry in .ARM.extab. */
2184 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2185 addr
+= exidx_vma
+ i
* 8 + 4;
2187 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_data
.size ())
2189 word
= bfd_h_get_32 (objfile
->obfd
,
2190 extab_data
.data () + addr
- extab_vma
);
2193 if ((word
& 0xff000000) == 0x80000000)
2198 else if ((word
& 0xff000000) == 0x81000000
2199 || (word
& 0xff000000) == 0x82000000)
2203 n_words
= ((word
>> 16) & 0xff);
2205 else if (!(word
& 0x80000000))
2208 struct obj_section
*pers_sec
;
2209 int gnu_personality
= 0;
2211 /* Custom personality routine. */
2212 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2213 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2215 /* Check whether we've got one of the variants of the
2216 GNU personality routines. */
2217 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2220 static const char *personality
[] =
2222 "__gcc_personality_v0",
2223 "__gxx_personality_v0",
2224 "__gcj_personality_v0",
2225 "__gnu_objc_personality_v0",
2229 CORE_ADDR pc
= pers
+ pers_sec
->offset ();
2232 for (k
= 0; personality
[k
]; k
++)
2233 if (lookup_minimal_symbol_by_pc_name
2234 (pc
, personality
[k
], objfile
))
2236 gnu_personality
= 1;
2241 /* If so, the next word contains a word count in the high
2242 byte, followed by the same unwind instructions as the
2243 pre-defined forms. */
2245 && addr
+ 4 <= extab_vma
+ extab_data
.size ())
2247 word
= bfd_h_get_32 (objfile
->obfd
,
2249 + addr
- extab_vma
));
2252 n_words
= ((word
>> 24) & 0xff);
2258 /* Sanity check address. */
2260 if (addr
< extab_vma
2261 || addr
+ 4 * n_words
> extab_vma
+ extab_data
.size ())
2262 n_words
= n_bytes
= 0;
2264 /* The unwind instructions reside in WORD (only the N_BYTES least
2265 significant bytes are valid), followed by N_WORDS words in the
2266 extab section starting at ADDR. */
2267 if (n_bytes
|| n_words
)
2270 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2271 n_bytes
+ n_words
* 4 + 1);
2274 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2278 word
= bfd_h_get_32 (objfile
->obfd
,
2279 extab_data
.data () + addr
- extab_vma
);
2282 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2283 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2284 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2285 *p
++ = (gdb_byte
) (word
& 0xff);
2288 /* Implied "Finish" to terminate the list. */
2292 /* Push entry onto vector. They are guaranteed to always
2293 appear in order of increasing addresses. */
2294 new_exidx_entry
.addr
= idx
;
2295 new_exidx_entry
.entry
= entry
;
2296 data
->section_maps
[sec
->the_bfd_section
->index
].push_back
2301 /* Search for the exception table entry covering MEMADDR. If one is found,
2302 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2303 set *START to the start of the region covered by this entry. */
2306 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2308 struct obj_section
*sec
;
2310 sec
= find_pc_section (memaddr
);
2313 struct arm_exidx_data
*data
;
2314 struct arm_exidx_entry map_key
= { memaddr
- sec
->addr (), 0 };
2316 data
= arm_exidx_data_key
.get (sec
->objfile
->obfd
);
2319 std::vector
<arm_exidx_entry
> &map
2320 = data
->section_maps
[sec
->the_bfd_section
->index
];
2323 auto idx
= std::lower_bound (map
.begin (), map
.end (), map_key
);
2325 /* std::lower_bound finds the earliest ordered insertion
2326 point. If the following symbol starts at this exact
2327 address, we use that; otherwise, the preceding
2328 exception table entry covers this address. */
2329 if (idx
< map
.end ())
2331 if (idx
->addr
== map_key
.addr
)
2334 *start
= idx
->addr
+ sec
->addr ();
2339 if (idx
> map
.begin ())
2343 *start
= idx
->addr
+ sec
->addr ();
2353 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2354 instruction list from the ARM exception table entry ENTRY, allocate and
2355 return a prologue cache structure describing how to unwind this frame.
2357 Return NULL if the unwinding instruction list contains a "spare",
2358 "reserved" or "refuse to unwind" instruction as defined in section
2359 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2360 for the ARM Architecture" document. */
2362 static struct arm_prologue_cache
*
2363 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2368 struct arm_prologue_cache
*cache
;
2369 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2370 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2376 /* Whenever we reload SP, we actually have to retrieve its
2377 actual value in the current frame. */
2380 if (cache
->saved_regs
[ARM_SP_REGNUM
].is_realreg ())
2382 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg ();
2383 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2387 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr ();
2388 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2394 /* Decode next unwind instruction. */
2397 if ((insn
& 0xc0) == 0)
2399 int offset
= insn
& 0x3f;
2400 vsp
+= (offset
<< 2) + 4;
2402 else if ((insn
& 0xc0) == 0x40)
2404 int offset
= insn
& 0x3f;
2405 vsp
-= (offset
<< 2) + 4;
2407 else if ((insn
& 0xf0) == 0x80)
2409 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2412 /* The special case of an all-zero mask identifies
2413 "Refuse to unwind". We return NULL to fall back
2414 to the prologue analyzer. */
2418 /* Pop registers r4..r15 under mask. */
2419 for (i
= 0; i
< 12; i
++)
2420 if (mask
& (1 << i
))
2422 cache
->saved_regs
[4 + i
].set_addr (vsp
);
2426 /* Special-case popping SP -- we need to reload vsp. */
2427 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2430 else if ((insn
& 0xf0) == 0x90)
2432 int reg
= insn
& 0xf;
2434 /* Reserved cases. */
2435 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2438 /* Set SP from another register and mark VSP for reload. */
2439 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2442 else if ((insn
& 0xf0) == 0xa0)
2444 int count
= insn
& 0x7;
2445 int pop_lr
= (insn
& 0x8) != 0;
2448 /* Pop r4..r[4+count]. */
2449 for (i
= 0; i
<= count
; i
++)
2451 cache
->saved_regs
[4 + i
].set_addr (vsp
);
2455 /* If indicated by flag, pop LR as well. */
2458 cache
->saved_regs
[ARM_LR_REGNUM
].set_addr (vsp
);
2462 else if (insn
== 0xb0)
2464 /* We could only have updated PC by popping into it; if so, it
2465 will show up as address. Otherwise, copy LR into PC. */
2466 if (!cache
->saved_regs
[ARM_PC_REGNUM
].is_addr ())
2467 cache
->saved_regs
[ARM_PC_REGNUM
]
2468 = cache
->saved_regs
[ARM_LR_REGNUM
];
2473 else if (insn
== 0xb1)
2475 int mask
= *entry
++;
2478 /* All-zero mask and mask >= 16 is "spare". */
2479 if (mask
== 0 || mask
>= 16)
2482 /* Pop r0..r3 under mask. */
2483 for (i
= 0; i
< 4; i
++)
2484 if (mask
& (1 << i
))
2486 cache
->saved_regs
[i
].set_addr (vsp
);
2490 else if (insn
== 0xb2)
2492 ULONGEST offset
= 0;
2497 offset
|= (*entry
& 0x7f) << shift
;
2500 while (*entry
++ & 0x80);
2502 vsp
+= 0x204 + (offset
<< 2);
2504 else if (insn
== 0xb3)
2506 int start
= *entry
>> 4;
2507 int count
= (*entry
++) & 0xf;
2510 /* Only registers D0..D15 are valid here. */
2511 if (start
+ count
>= 16)
2514 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2515 for (i
= 0; i
<= count
; i
++)
2517 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].set_addr (vsp
);
2521 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2524 else if ((insn
& 0xf8) == 0xb8)
2526 int count
= insn
& 0x7;
2529 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2530 for (i
= 0; i
<= count
; i
++)
2532 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].set_addr (vsp
);
2536 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2539 else if (insn
== 0xc6)
2541 int start
= *entry
>> 4;
2542 int count
= (*entry
++) & 0xf;
2545 /* Only registers WR0..WR15 are valid. */
2546 if (start
+ count
>= 16)
2549 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2550 for (i
= 0; i
<= count
; i
++)
2552 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].set_addr (vsp
);
2556 else if (insn
== 0xc7)
2558 int mask
= *entry
++;
2561 /* All-zero mask and mask >= 16 is "spare". */
2562 if (mask
== 0 || mask
>= 16)
2565 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2566 for (i
= 0; i
< 4; i
++)
2567 if (mask
& (1 << i
))
2569 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].set_addr (vsp
);
2573 else if ((insn
& 0xf8) == 0xc0)
2575 int count
= insn
& 0x7;
2578 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2579 for (i
= 0; i
<= count
; i
++)
2581 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].set_addr (vsp
);
2585 else if (insn
== 0xc8)
2587 int start
= *entry
>> 4;
2588 int count
= (*entry
++) & 0xf;
2591 /* Only registers D0..D31 are valid. */
2592 if (start
+ count
>= 16)
2595 /* Pop VFP double-precision registers
2596 D[16+start]..D[16+start+count]. */
2597 for (i
= 0; i
<= count
; i
++)
2599 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].set_addr (vsp
);
2603 else if (insn
== 0xc9)
2605 int start
= *entry
>> 4;
2606 int count
= (*entry
++) & 0xf;
2609 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2610 for (i
= 0; i
<= count
; i
++)
2612 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].set_addr (vsp
);
2616 else if ((insn
& 0xf8) == 0xd0)
2618 int count
= insn
& 0x7;
2621 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2622 for (i
= 0; i
<= count
; i
++)
2624 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].set_addr (vsp
);
2630 /* Everything else is "spare". */
2635 /* If we restore SP from a register, assume this was the frame register.
2636 Otherwise just fall back to SP as frame register. */
2637 if (cache
->saved_regs
[ARM_SP_REGNUM
].is_realreg ())
2638 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg ();
2640 cache
->framereg
= ARM_SP_REGNUM
;
2642 /* Determine offset to previous frame. */
2644 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2646 /* We already got the previous SP. */
2647 cache
->prev_sp
= vsp
;
2652 /* Unwinding via ARM exception table entries. Note that the sniffer
2653 already computes a filled-in prologue cache, which is then used
2654 with the same arm_prologue_this_id and arm_prologue_prev_register
2655 routines also used for prologue-parsing based unwinding. */
2658 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2659 struct frame_info
*this_frame
,
2660 void **this_prologue_cache
)
2662 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2663 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2664 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2665 struct arm_prologue_cache
*cache
;
2668 /* See if we have an ARM exception table entry covering this address. */
2669 addr_in_block
= get_frame_address_in_block (this_frame
);
2670 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2674 /* The ARM exception table does not describe unwind information
2675 for arbitrary PC values, but is guaranteed to be correct only
2676 at call sites. We have to decide here whether we want to use
2677 ARM exception table information for this frame, or fall back
2678 to using prologue parsing. (Note that if we have DWARF CFI,
2679 this sniffer isn't even called -- CFI is always preferred.)
2681 Before we make this decision, however, we check whether we
2682 actually have *symbol* information for the current frame.
2683 If not, prologue parsing would not work anyway, so we might
2684 as well use the exception table and hope for the best. */
2685 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2689 /* If the next frame is "normal", we are at a call site in this
2690 frame, so exception information is guaranteed to be valid. */
2691 if (get_next_frame (this_frame
)
2692 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2695 /* We also assume exception information is valid if we're currently
2696 blocked in a system call. The system library is supposed to
2697 ensure this, so that e.g. pthread cancellation works. */
2698 if (arm_frame_is_thumb (this_frame
))
2702 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 2,
2703 2, byte_order_for_code
, &insn
)
2704 && (insn
& 0xff00) == 0xdf00 /* svc */)
2711 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 4,
2712 4, byte_order_for_code
, &insn
)
2713 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2717 /* Bail out if we don't know that exception information is valid. */
2721 /* The ARM exception index does not mark the *end* of the region
2722 covered by the entry, and some functions will not have any entry.
2723 To correctly recognize the end of the covered region, the linker
2724 should have inserted dummy records with a CANTUNWIND marker.
2726 Unfortunately, current versions of GNU ld do not reliably do
2727 this, and thus we may have found an incorrect entry above.
2728 As a (temporary) sanity check, we only use the entry if it
2729 lies *within* the bounds of the function. Note that this check
2730 might reject perfectly valid entries that just happen to cover
2731 multiple functions; therefore this check ought to be removed
2732 once the linker is fixed. */
2733 if (func_start
> exidx_region
)
2737 /* Decode the list of unwinding instructions into a prologue cache.
2738 Note that this may fail due to e.g. a "refuse to unwind" code. */
2739 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2743 *this_prologue_cache
= cache
;
2747 struct frame_unwind arm_exidx_unwind
= {
2750 default_frame_unwind_stop_reason
,
2751 arm_prologue_this_id
,
2752 arm_prologue_prev_register
,
2754 arm_exidx_unwind_sniffer
2757 static struct arm_prologue_cache
*
2758 arm_make_epilogue_frame_cache (struct frame_info
*this_frame
)
2760 struct arm_prologue_cache
*cache
;
2763 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2764 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2766 /* Still rely on the offset calculated from prologue. */
2767 arm_scan_prologue (this_frame
, cache
);
2769 /* Since we are in epilogue, the SP has been restored. */
2770 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2772 /* Calculate actual addresses of saved registers using offsets
2773 determined by arm_scan_prologue. */
2774 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2775 if (cache
->saved_regs
[reg
].is_addr ())
2776 cache
->saved_regs
[reg
].set_addr (cache
->saved_regs
[reg
].addr ()
2782 /* Implementation of function hook 'this_id' in
2783 'struct frame_uwnind' for epilogue unwinder. */
2786 arm_epilogue_frame_this_id (struct frame_info
*this_frame
,
2788 struct frame_id
*this_id
)
2790 struct arm_prologue_cache
*cache
;
2793 if (*this_cache
== NULL
)
2794 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2795 cache
= (struct arm_prologue_cache
*) *this_cache
;
2797 /* Use function start address as part of the frame ID. If we cannot
2798 identify the start address (due to missing symbol information),
2799 fall back to just using the current PC. */
2800 pc
= get_frame_pc (this_frame
);
2801 func
= get_frame_func (this_frame
);
2805 (*this_id
) = frame_id_build (cache
->prev_sp
, pc
);
2808 /* Implementation of function hook 'prev_register' in
2809 'struct frame_uwnind' for epilogue unwinder. */
2811 static struct value
*
2812 arm_epilogue_frame_prev_register (struct frame_info
*this_frame
,
2813 void **this_cache
, int regnum
)
2815 if (*this_cache
== NULL
)
2816 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2818 return arm_prologue_prev_register (this_frame
, this_cache
, regnum
);
2821 static int arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
,
2823 static int thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
,
2826 /* Implementation of function hook 'sniffer' in
2827 'struct frame_uwnind' for epilogue unwinder. */
2830 arm_epilogue_frame_sniffer (const struct frame_unwind
*self
,
2831 struct frame_info
*this_frame
,
2832 void **this_prologue_cache
)
2834 if (frame_relative_level (this_frame
) == 0)
2836 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2837 CORE_ADDR pc
= get_frame_pc (this_frame
);
2839 if (arm_frame_is_thumb (this_frame
))
2840 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
2842 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
2848 /* Frame unwinder from epilogue. */
2850 static const struct frame_unwind arm_epilogue_frame_unwind
=
2854 default_frame_unwind_stop_reason
,
2855 arm_epilogue_frame_this_id
,
2856 arm_epilogue_frame_prev_register
,
2858 arm_epilogue_frame_sniffer
,
2861 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2862 trampoline, return the target PC. Otherwise return 0.
2864 void call0a (char c, short s, int i, long l) {}
2868 (*pointer_to_call0a) (c, s, i, l);
2871 Instead of calling a stub library function _call_via_xx (xx is
2872 the register name), GCC may inline the trampoline in the object
2873 file as below (register r2 has the address of call0a).
2876 .type main, %function
2885 The trampoline 'bx r2' doesn't belong to main. */
2888 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2890 /* The heuristics of recognizing such trampoline is that FRAME is
2891 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2892 if (arm_frame_is_thumb (frame
))
2896 if (target_read_memory (pc
, buf
, 2) == 0)
2898 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2899 enum bfd_endian byte_order_for_code
2900 = gdbarch_byte_order_for_code (gdbarch
);
2902 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2904 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2907 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2909 /* Clear the LSB so that gdb core sets step-resume
2910 breakpoint at the right address. */
2911 return UNMAKE_THUMB_ADDR (dest
);
2919 static struct arm_prologue_cache
*
2920 arm_make_stub_cache (struct frame_info
*this_frame
)
2922 struct arm_prologue_cache
*cache
;
2924 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2925 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2927 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2932 /* Our frame ID for a stub frame is the current SP and LR. */
2935 arm_stub_this_id (struct frame_info
*this_frame
,
2937 struct frame_id
*this_id
)
2939 struct arm_prologue_cache
*cache
;
2941 if (*this_cache
== NULL
)
2942 *this_cache
= arm_make_stub_cache (this_frame
);
2943 cache
= (struct arm_prologue_cache
*) *this_cache
;
2945 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2949 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2950 struct frame_info
*this_frame
,
2951 void **this_prologue_cache
)
2953 CORE_ADDR addr_in_block
;
2955 CORE_ADDR pc
, start_addr
;
2958 addr_in_block
= get_frame_address_in_block (this_frame
);
2959 pc
= get_frame_pc (this_frame
);
2960 if (in_plt_section (addr_in_block
)
2961 /* We also use the stub winder if the target memory is unreadable
2962 to avoid having the prologue unwinder trying to read it. */
2963 || target_read_memory (pc
, dummy
, 4) != 0)
2966 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2967 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2973 struct frame_unwind arm_stub_unwind
= {
2976 default_frame_unwind_stop_reason
,
2978 arm_prologue_prev_register
,
2980 arm_stub_unwind_sniffer
2983 /* Put here the code to store, into CACHE->saved_regs, the addresses
2984 of the saved registers of frame described by THIS_FRAME. CACHE is
2987 static struct arm_prologue_cache
*
2988 arm_m_exception_cache (struct frame_info
*this_frame
)
2990 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2991 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
2992 struct arm_prologue_cache
*cache
;
2995 CORE_ADDR unwound_sp
;
2997 uint32_t exc_return
;
2998 uint32_t process_stack_used
;
2999 uint32_t extended_frame_used
;
3000 uint32_t secure_stack_used
;
3002 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
3003 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
3005 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3006 describes which bits in LR that define which stack was used prior
3007 to the exception and if FPU is used (causing extended stack frame). */
3009 lr
= get_frame_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3010 sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
3012 /* Check EXC_RETURN indicator bits. */
3013 exc_return
= (((lr
>> 28) & 0xf) == 0xf);
3015 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3016 process_stack_used
= ((lr
& (1 << 2)) != 0);
3017 if (exc_return
&& process_stack_used
)
3019 /* Thread (process) stack used.
3020 Potentially this could be other register defined by target, but PSP
3021 can be considered a standard name for the "Process Stack Pointer".
3022 To be fully aware of system registers like MSP and PSP, these could
3023 be added to a separate XML arm-m-system-profile that is valid for
3024 ARMv6-M and ARMv7-M architectures. Also to be able to debug eg a
3025 corefile off-line, then these registers must be defined by GDB,
3026 and also be included in the corefile regsets. */
3028 int psp_regnum
= user_reg_map_name_to_regnum (gdbarch
, "psp", -1);
3029 if (psp_regnum
== -1)
3031 /* Thread (process) stack could not be fetched,
3032 give warning and exit. */
3034 warning (_("no PSP thread stack unwinding supported."));
3036 /* Terminate any further stack unwinding by refer to self. */
3037 cache
->prev_sp
= sp
;
3042 /* Thread (process) stack used, use PSP as SP. */
3043 unwound_sp
= get_frame_register_unsigned (this_frame
, psp_regnum
);
3048 /* Main stack used, use MSP as SP. */
3052 /* The hardware saves eight 32-bit words, comprising xPSR,
3053 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3054 "B1.5.6 Exception entry behavior" in
3055 "ARMv7-M Architecture Reference Manual". */
3056 cache
->saved_regs
[0].set_addr (unwound_sp
);
3057 cache
->saved_regs
[1].set_addr (unwound_sp
+ 4);
3058 cache
->saved_regs
[2].set_addr (unwound_sp
+ 8);
3059 cache
->saved_regs
[3].set_addr (unwound_sp
+ 12);
3060 cache
->saved_regs
[ARM_IP_REGNUM
].set_addr (unwound_sp
+ 16);
3061 cache
->saved_regs
[ARM_LR_REGNUM
].set_addr (unwound_sp
+ 20);
3062 cache
->saved_regs
[ARM_PC_REGNUM
].set_addr (unwound_sp
+ 24);
3063 cache
->saved_regs
[ARM_PS_REGNUM
].set_addr (unwound_sp
+ 28);
3065 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3067 extended_frame_used
= ((lr
& (1 << 4)) == 0);
3068 if (exc_return
&& extended_frame_used
)
3071 int fpu_regs_stack_offset
;
3073 /* This code does not take into account the lazy stacking, see "Lazy
3074 context save of FP state", in B1.5.7, also ARM AN298, supported
3075 by Cortex-M4F architecture.
3076 To fully handle this the FPCCR register (Floating-point Context
3077 Control Register) needs to be read out and the bits ASPEN and LSPEN
3078 could be checked to setup correct lazy stacked FP registers.
3079 This register is located at address 0xE000EF34. */
3081 /* Extended stack frame type used. */
3082 fpu_regs_stack_offset
= unwound_sp
+ 0x20;
3083 for (i
= 0; i
< 16; i
++)
3085 cache
->saved_regs
[ARM_D0_REGNUM
+ i
].set_addr (fpu_regs_stack_offset
);
3086 fpu_regs_stack_offset
+= 4;
3088 cache
->saved_regs
[ARM_FPSCR_REGNUM
].set_addr (unwound_sp
+ 0x60);
3090 /* Offset 0x64 is reserved. */
3091 cache
->prev_sp
= unwound_sp
+ 0x68;
3095 /* Standard stack frame type used. */
3096 cache
->prev_sp
= unwound_sp
+ 0x20;
3099 /* Check EXC_RETURN bit S if Secure or Non-secure stack used. */
3100 secure_stack_used
= ((lr
& (1 << 6)) != 0);
3101 if (exc_return
&& secure_stack_used
)
3103 /* ARMv8-M Exception and interrupt handling is not considered here.
3104 In the ARMv8-M architecture also EXC_RETURN bit S is controlling if
3105 the Secure or Non-secure stack was used. To separate Secure and
3106 Non-secure stacks, processors that are based on the ARMv8-M
3107 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3108 In addition, a stack limit feature is provided using stack limit
3109 registers (accessible using MSR and MRS instructions) in Privileged
3113 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3114 aligner between the top of the 32-byte stack frame and the
3115 previous context's stack pointer. */
3116 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
3117 && (xpsr
& (1 << 9)) != 0)
3118 cache
->prev_sp
+= 4;
3123 /* Implementation of function hook 'this_id' in
3124 'struct frame_uwnind'. */
3127 arm_m_exception_this_id (struct frame_info
*this_frame
,
3129 struct frame_id
*this_id
)
3131 struct arm_prologue_cache
*cache
;
3133 if (*this_cache
== NULL
)
3134 *this_cache
= arm_m_exception_cache (this_frame
);
3135 cache
= (struct arm_prologue_cache
*) *this_cache
;
3137 /* Our frame ID for a stub frame is the current SP and LR. */
3138 *this_id
= frame_id_build (cache
->prev_sp
,
3139 get_frame_pc (this_frame
));
3142 /* Implementation of function hook 'prev_register' in
3143 'struct frame_uwnind'. */
3145 static struct value
*
3146 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3150 struct arm_prologue_cache
*cache
;
3152 if (*this_cache
== NULL
)
3153 *this_cache
= arm_m_exception_cache (this_frame
);
3154 cache
= (struct arm_prologue_cache
*) *this_cache
;
3156 /* The value was already reconstructed into PREV_SP. */
3157 if (prev_regnum
== ARM_SP_REGNUM
)
3158 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3161 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3165 /* Implementation of function hook 'sniffer' in
3166 'struct frame_uwnind'. */
3169 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3170 struct frame_info
*this_frame
,
3171 void **this_prologue_cache
)
3173 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3175 /* No need to check is_m; this sniffer is only registered for
3176 M-profile architectures. */
3178 /* Check if exception frame returns to a magic PC value. */
3179 return arm_m_addr_is_magic (this_pc
);
3182 /* Frame unwinder for M-profile exceptions. */
3184 struct frame_unwind arm_m_exception_unwind
=
3188 default_frame_unwind_stop_reason
,
3189 arm_m_exception_this_id
,
3190 arm_m_exception_prev_register
,
3192 arm_m_exception_unwind_sniffer
3196 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3198 struct arm_prologue_cache
*cache
;
3200 if (*this_cache
== NULL
)
3201 *this_cache
= arm_make_prologue_cache (this_frame
);
3202 cache
= (struct arm_prologue_cache
*) *this_cache
;
3204 return cache
->prev_sp
- cache
->framesize
;
3207 struct frame_base arm_normal_base
= {
3208 &arm_prologue_unwind
,
3209 arm_normal_frame_base
,
3210 arm_normal_frame_base
,
3211 arm_normal_frame_base
3214 static struct value
*
3215 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3218 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3220 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3225 /* The PC is normally copied from the return column, which
3226 describes saves of LR. However, that version may have an
3227 extra bit set to indicate Thumb state. The bit is not
3229 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3230 return frame_unwind_got_constant (this_frame
, regnum
,
3231 arm_addr_bits_remove (gdbarch
, lr
));
3234 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3235 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3236 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3237 if (IS_THUMB_ADDR (lr
))
3241 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3244 internal_error (__FILE__
, __LINE__
,
3245 _("Unexpected register %d"), regnum
);
3250 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3251 struct dwarf2_frame_state_reg
*reg
,
3252 struct frame_info
*this_frame
)
3258 reg
->how
= DWARF2_FRAME_REG_FN
;
3259 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3262 reg
->how
= DWARF2_FRAME_REG_CFA
;
3267 /* Implement the stack_frame_destroyed_p gdbarch method. */
3270 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3272 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3273 unsigned int insn
, insn2
;
3274 int found_return
= 0, found_stack_adjust
= 0;
3275 CORE_ADDR func_start
, func_end
;
3279 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3282 /* The epilogue is a sequence of instructions along the following lines:
3284 - add stack frame size to SP or FP
3285 - [if frame pointer used] restore SP from FP
3286 - restore registers from SP [may include PC]
3287 - a return-type instruction [if PC wasn't already restored]
3289 In a first pass, we scan forward from the current PC and verify the
3290 instructions we find as compatible with this sequence, ending in a
3293 However, this is not sufficient to distinguish indirect function calls
3294 within a function from indirect tail calls in the epilogue in some cases.
3295 Therefore, if we didn't already find any SP-changing instruction during
3296 forward scan, we add a backward scanning heuristic to ensure we actually
3297 are in the epilogue. */
3300 while (scan_pc
< func_end
&& !found_return
)
3302 if (target_read_memory (scan_pc
, buf
, 2))
3306 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3308 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3310 else if (insn
== 0x46f7) /* mov pc, lr */
3312 else if (thumb_instruction_restores_sp (insn
))
3314 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3317 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3319 if (target_read_memory (scan_pc
, buf
, 2))
3323 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3325 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3327 if (insn2
& 0x8000) /* <registers> include PC. */
3330 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3331 && (insn2
& 0x0fff) == 0x0b04)
3333 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3336 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3337 && (insn2
& 0x0e00) == 0x0a00)
3349 /* Since any instruction in the epilogue sequence, with the possible
3350 exception of return itself, updates the stack pointer, we need to
3351 scan backwards for at most one instruction. Try either a 16-bit or
3352 a 32-bit instruction. This is just a heuristic, so we do not worry
3353 too much about false positives. */
3355 if (pc
- 4 < func_start
)
3357 if (target_read_memory (pc
- 4, buf
, 4))
3360 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3361 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3363 if (thumb_instruction_restores_sp (insn2
))
3364 found_stack_adjust
= 1;
3365 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3366 found_stack_adjust
= 1;
3367 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3368 && (insn2
& 0x0fff) == 0x0b04)
3369 found_stack_adjust
= 1;
3370 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3371 && (insn2
& 0x0e00) == 0x0a00)
3372 found_stack_adjust
= 1;
3374 return found_stack_adjust
;
3378 arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3380 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3383 CORE_ADDR func_start
, func_end
;
3385 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3388 /* We are in the epilogue if the previous instruction was a stack
3389 adjustment and the next instruction is a possible return (bx, mov
3390 pc, or pop). We could have to scan backwards to find the stack
3391 adjustment, or forwards to find the return, but this is a decent
3392 approximation. First scan forwards. */
3395 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3396 if (bits (insn
, 28, 31) != INST_NV
)
3398 if ((insn
& 0x0ffffff0) == 0x012fff10)
3401 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3404 else if ((insn
& 0x0fff0000) == 0x08bd0000
3405 && (insn
& 0x0000c000) != 0)
3406 /* POP (LDMIA), including PC or LR. */
3413 /* Scan backwards. This is just a heuristic, so do not worry about
3414 false positives from mode changes. */
3416 if (pc
< func_start
+ 4)
3419 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3420 if (arm_instruction_restores_sp (insn
))
3426 /* Implement the stack_frame_destroyed_p gdbarch method. */
3429 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3431 if (arm_pc_is_thumb (gdbarch
, pc
))
3432 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3434 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
3437 /* When arguments must be pushed onto the stack, they go on in reverse
3438 order. The code below implements a FILO (stack) to do this. */
3443 struct stack_item
*prev
;
3447 static struct stack_item
*
3448 push_stack_item (struct stack_item
*prev
, const gdb_byte
*contents
, int len
)
3450 struct stack_item
*si
;
3451 si
= XNEW (struct stack_item
);
3452 si
->data
= (gdb_byte
*) xmalloc (len
);
3455 memcpy (si
->data
, contents
, len
);
3459 static struct stack_item
*
3460 pop_stack_item (struct stack_item
*si
)
3462 struct stack_item
*dead
= si
;
3469 /* Implement the gdbarch type alignment method, overrides the generic
3470 alignment algorithm for anything that is arm specific. */
3473 arm_type_align (gdbarch
*gdbarch
, struct type
*t
)
3475 t
= check_typedef (t
);
3476 if (t
->code () == TYPE_CODE_ARRAY
&& t
->is_vector ())
3478 /* Use the natural alignment for vector types (the same for
3479 scalar type), but the maximum alignment is 64-bit. */
3480 if (TYPE_LENGTH (t
) > 8)
3483 return TYPE_LENGTH (t
);
3486 /* Allow the common code to calculate the alignment. */
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3502 /* The length of one element of base type B. */
3505 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3509 case VFP_CPRC_SINGLE
:
3511 case VFP_CPRC_DOUBLE
:
3513 case VFP_CPRC_VEC64
:
3515 case VFP_CPRC_VEC128
:
3518 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3523 /* The character ('s', 'd' or 'q') for the type of VFP register used
3524 for passing base type B. */
3527 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3531 case VFP_CPRC_SINGLE
:
3533 case VFP_CPRC_DOUBLE
:
3535 case VFP_CPRC_VEC64
:
3537 case VFP_CPRC_VEC128
:
3540 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3545 /* Determine whether T may be part of a candidate for passing and
3546 returning in VFP registers, ignoring the limit on the total number
3547 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3548 classification of the first valid component found; if it is not
3549 VFP_CPRC_UNKNOWN, all components must have the same classification
3550 as *BASE_TYPE. If it is found that T contains a type not permitted
3551 for passing and returning in VFP registers, a type differently
3552 classified from *BASE_TYPE, or two types differently classified
3553 from each other, return -1, otherwise return the total number of
3554 base-type elements found (possibly 0 in an empty structure or
3555 array). Vector types are not currently supported, matching the
3556 generic AAPCS support. */
3559 arm_vfp_cprc_sub_candidate (struct type
*t
,
3560 enum arm_vfp_cprc_base_type
*base_type
)
3562 t
= check_typedef (t
);
3566 switch (TYPE_LENGTH (t
))
3569 if (*base_type
== VFP_CPRC_UNKNOWN
)
3570 *base_type
= VFP_CPRC_SINGLE
;
3571 else if (*base_type
!= VFP_CPRC_SINGLE
)
3576 if (*base_type
== VFP_CPRC_UNKNOWN
)
3577 *base_type
= VFP_CPRC_DOUBLE
;
3578 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3587 case TYPE_CODE_COMPLEX
:
3588 /* Arguments of complex T where T is one of the types float or
3589 double get treated as if they are implemented as:
3598 switch (TYPE_LENGTH (t
))
3601 if (*base_type
== VFP_CPRC_UNKNOWN
)
3602 *base_type
= VFP_CPRC_SINGLE
;
3603 else if (*base_type
!= VFP_CPRC_SINGLE
)
3608 if (*base_type
== VFP_CPRC_UNKNOWN
)
3609 *base_type
= VFP_CPRC_DOUBLE
;
3610 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3619 case TYPE_CODE_ARRAY
:
3621 if (t
->is_vector ())
3623 /* A 64-bit or 128-bit containerized vector type are VFP
3625 switch (TYPE_LENGTH (t
))
3628 if (*base_type
== VFP_CPRC_UNKNOWN
)
3629 *base_type
= VFP_CPRC_VEC64
;
3632 if (*base_type
== VFP_CPRC_UNKNOWN
)
3633 *base_type
= VFP_CPRC_VEC128
;
3644 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
),
3648 if (TYPE_LENGTH (t
) == 0)
3650 gdb_assert (count
== 0);
3653 else if (count
== 0)
3655 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3656 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3657 return TYPE_LENGTH (t
) / unitlen
;
3662 case TYPE_CODE_STRUCT
:
3667 for (i
= 0; i
< t
->num_fields (); i
++)
3671 if (!field_is_static (&t
->field (i
)))
3672 sub_count
= arm_vfp_cprc_sub_candidate (t
->field (i
).type (),
3674 if (sub_count
== -1)
3678 if (TYPE_LENGTH (t
) == 0)
3680 gdb_assert (count
== 0);
3683 else if (count
== 0)
3685 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3686 if (TYPE_LENGTH (t
) != unitlen
* count
)
3691 case TYPE_CODE_UNION
:
3696 for (i
= 0; i
< t
->num_fields (); i
++)
3698 int sub_count
= arm_vfp_cprc_sub_candidate (t
->field (i
).type (),
3700 if (sub_count
== -1)
3702 count
= (count
> sub_count
? count
: sub_count
);
3704 if (TYPE_LENGTH (t
) == 0)
3706 gdb_assert (count
== 0);
3709 else if (count
== 0)
3711 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3712 if (TYPE_LENGTH (t
) != unitlen
* count
)
3724 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3725 if passed to or returned from a non-variadic function with the VFP
3726 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3727 *BASE_TYPE to the base type for T and *COUNT to the number of
3728 elements of that base type before returning. */
3731 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3734 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3735 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3736 if (c
<= 0 || c
> 4)
3743 /* Return 1 if the VFP ABI should be used for passing arguments to and
3744 returning values from a function of type FUNC_TYPE, 0
3748 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3750 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
3752 /* Variadic functions always use the base ABI. Assume that functions
3753 without debug info are not variadic. */
3754 if (func_type
&& check_typedef (func_type
)->has_varargs ())
3757 /* The VFP ABI is only supported as a variant of AAPCS. */
3758 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3761 return tdep
->fp_model
== ARM_FLOAT_VFP
;
3764 /* We currently only support passing parameters in integer registers, which
3765 conforms with GCC's default model, and VFP argument passing following
3766 the VFP variant of AAPCS. Several other variants exist and
3767 we should probably support some of them based on the selected ABI. */
3770 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3771 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3772 struct value
**args
, CORE_ADDR sp
,
3773 function_call_return_method return_method
,
3774 CORE_ADDR struct_addr
)
3776 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3780 struct stack_item
*si
= NULL
;
3783 unsigned vfp_regs_free
= (1 << 16) - 1;
3784 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
3786 /* Determine the type of this function and whether the VFP ABI
3788 ftype
= check_typedef (value_type (function
));
3789 if (ftype
->code () == TYPE_CODE_PTR
)
3790 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3791 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3793 /* Set the return address. For the ARM, the return breakpoint is
3794 always at BP_ADDR. */
3795 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3797 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3799 /* Walk through the list of args and determine how large a temporary
3800 stack is required. Need to take care here as structs may be
3801 passed on the stack, and we have to push them. */
3804 argreg
= ARM_A1_REGNUM
;
3807 /* The struct_return pointer occupies the first parameter
3808 passing register. */
3809 if (return_method
== return_method_struct
)
3811 arm_debug_printf ("struct return in %s = %s",
3812 gdbarch_register_name (gdbarch
, argreg
),
3813 paddress (gdbarch
, struct_addr
));
3815 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3819 for (argnum
= 0; argnum
< nargs
; argnum
++)
3822 struct type
*arg_type
;
3823 struct type
*target_type
;
3824 enum type_code typecode
;
3825 const bfd_byte
*val
;
3827 enum arm_vfp_cprc_base_type vfp_base_type
;
3829 int may_use_core_reg
= 1;
3831 arg_type
= check_typedef (value_type (args
[argnum
]));
3832 len
= TYPE_LENGTH (arg_type
);
3833 target_type
= TYPE_TARGET_TYPE (arg_type
);
3834 typecode
= arg_type
->code ();
3835 val
= value_contents (args
[argnum
]).data ();
3837 align
= type_align (arg_type
);
3838 /* Round alignment up to a whole number of words. */
3839 align
= (align
+ ARM_INT_REGISTER_SIZE
- 1)
3840 & ~(ARM_INT_REGISTER_SIZE
- 1);
3841 /* Different ABIs have different maximum alignments. */
3842 if (tdep
->arm_abi
== ARM_ABI_APCS
)
3844 /* The APCS ABI only requires word alignment. */
3845 align
= ARM_INT_REGISTER_SIZE
;
3849 /* The AAPCS requires at most doubleword alignment. */
3850 if (align
> ARM_INT_REGISTER_SIZE
* 2)
3851 align
= ARM_INT_REGISTER_SIZE
* 2;
3855 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3863 /* Because this is a CPRC it cannot go in a core register or
3864 cause a core register to be skipped for alignment.
3865 Either it goes in VFP registers and the rest of this loop
3866 iteration is skipped for this argument, or it goes on the
3867 stack (and the stack alignment code is correct for this
3869 may_use_core_reg
= 0;
3871 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3872 shift
= unit_length
/ 4;
3873 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3874 for (regno
= 0; regno
< 16; regno
+= shift
)
3875 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3884 vfp_regs_free
&= ~(mask
<< regno
);
3885 reg_scaled
= regno
/ shift
;
3886 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3887 for (i
= 0; i
< vfp_base_count
; i
++)
3891 if (reg_char
== 'q')
3892 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3893 val
+ i
* unit_length
);
3896 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3897 reg_char
, reg_scaled
+ i
);
3898 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3900 regcache
->cooked_write (regnum
, val
+ i
* unit_length
);
3907 /* This CPRC could not go in VFP registers, so all VFP
3908 registers are now marked as used. */
3913 /* Push stack padding for doubleword alignment. */
3914 if (nstack
& (align
- 1))
3916 si
= push_stack_item (si
, val
, ARM_INT_REGISTER_SIZE
);
3917 nstack
+= ARM_INT_REGISTER_SIZE
;
3920 /* Doubleword aligned quantities must go in even register pairs. */
3921 if (may_use_core_reg
3922 && argreg
<= ARM_LAST_ARG_REGNUM
3923 && align
> ARM_INT_REGISTER_SIZE
3927 /* If the argument is a pointer to a function, and it is a
3928 Thumb function, create a LOCAL copy of the value and set
3929 the THUMB bit in it. */
3930 if (TYPE_CODE_PTR
== typecode
3931 && target_type
!= NULL
3932 && TYPE_CODE_FUNC
== check_typedef (target_type
)->code ())
3934 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3935 if (arm_pc_is_thumb (gdbarch
, regval
))
3937 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3938 store_unsigned_integer (copy
, len
, byte_order
,
3939 MAKE_THUMB_ADDR (regval
));
3944 /* Copy the argument to general registers or the stack in
3945 register-sized pieces. Large arguments are split between
3946 registers and stack. */
3949 int partial_len
= len
< ARM_INT_REGISTER_SIZE
3950 ? len
: ARM_INT_REGISTER_SIZE
;
3952 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3954 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3956 /* The argument is being passed in a general purpose
3958 if (byte_order
== BFD_ENDIAN_BIG
)
3959 regval
<<= (ARM_INT_REGISTER_SIZE
- partial_len
) * 8;
3961 arm_debug_printf ("arg %d in %s = 0x%s", argnum
,
3962 gdbarch_register_name (gdbarch
, argreg
),
3963 phex (regval
, ARM_INT_REGISTER_SIZE
));
3965 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3970 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
3972 memset (buf
, 0, sizeof (buf
));
3973 store_unsigned_integer (buf
, partial_len
, byte_order
, regval
);
3975 /* Push the arguments onto the stack. */
3976 arm_debug_printf ("arg %d @ sp + %d", argnum
, nstack
);
3977 si
= push_stack_item (si
, buf
, ARM_INT_REGISTER_SIZE
);
3978 nstack
+= ARM_INT_REGISTER_SIZE
;
3985 /* If we have an odd number of words to push, then decrement the stack
3986 by one word now, so first stack argument will be dword aligned. */
3993 write_memory (sp
, si
->data
, si
->len
);
3994 si
= pop_stack_item (si
);
3997 /* Finally, update teh SP register. */
3998 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
4004 /* Always align the frame to an 8-byte boundary. This is required on
4005 some platforms and harmless on the rest. */
4008 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
4010 /* Align the stack to eight bytes. */
4011 return sp
& ~ (CORE_ADDR
) 7;
4015 print_fpu_flags (struct ui_file
*file
, int flags
)
4017 if (flags
& (1 << 0))
4018 fputs_filtered ("IVO ", file
);
4019 if (flags
& (1 << 1))
4020 fputs_filtered ("DVZ ", file
);
4021 if (flags
& (1 << 2))
4022 fputs_filtered ("OFL ", file
);
4023 if (flags
& (1 << 3))
4024 fputs_filtered ("UFL ", file
);
4025 if (flags
& (1 << 4))
4026 fputs_filtered ("INX ", file
);
4027 fputc_filtered ('\n', file
);
4030 /* Print interesting information about the floating point processor
4031 (if present) or emulator. */
4033 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
4034 struct frame_info
*frame
, const char *args
)
4036 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
4039 type
= (status
>> 24) & 127;
4040 if (status
& (1 << 31))
4041 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
4043 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
4044 /* i18n: [floating point unit] mask */
4045 fputs_filtered (_("mask: "), file
);
4046 print_fpu_flags (file
, status
>> 16);
4047 /* i18n: [floating point unit] flags */
4048 fputs_filtered (_("flags: "), file
);
4049 print_fpu_flags (file
, status
);
4052 /* Construct the ARM extended floating point type. */
4053 static struct type
*
4054 arm_ext_type (struct gdbarch
*gdbarch
)
4056 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4058 if (!tdep
->arm_ext_type
)
4060 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4061 floatformats_arm_ext
);
4063 return tdep
->arm_ext_type
;
4066 static struct type
*
4067 arm_neon_double_type (struct gdbarch
*gdbarch
)
4069 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4071 if (tdep
->neon_double_type
== NULL
)
4073 struct type
*t
, *elem
;
4075 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4077 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4078 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4079 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4080 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4081 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4082 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4083 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4084 append_composite_type_field (t
, "u64", elem
);
4085 elem
= builtin_type (gdbarch
)->builtin_float
;
4086 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4087 elem
= builtin_type (gdbarch
)->builtin_double
;
4088 append_composite_type_field (t
, "f64", elem
);
4090 t
->set_is_vector (true);
4091 t
->set_name ("neon_d");
4092 tdep
->neon_double_type
= t
;
4095 return tdep
->neon_double_type
;
4098 /* FIXME: The vector types are not correctly ordered on big-endian
4099 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4100 bits of d0 - regardless of what unit size is being held in d0. So
4101 the offset of the first uint8 in d0 is 7, but the offset of the
4102 first float is 4. This code works as-is for little-endian
4105 static struct type
*
4106 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4108 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4110 if (tdep
->neon_quad_type
== NULL
)
4112 struct type
*t
, *elem
;
4114 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4116 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4117 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4118 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4119 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4120 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4121 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4122 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4123 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4124 elem
= builtin_type (gdbarch
)->builtin_float
;
4125 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4126 elem
= builtin_type (gdbarch
)->builtin_double
;
4127 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4129 t
->set_is_vector (true);
4130 t
->set_name ("neon_q");
4131 tdep
->neon_quad_type
= t
;
4134 return tdep
->neon_quad_type
;
4137 /* Return true if REGNUM is a Q pseudo register. Return false
4140 REGNUM is the raw register number and not a pseudo-relative register
4144 is_q_pseudo (struct gdbarch
*gdbarch
, int regnum
)
4146 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4148 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4149 MVE (Q0~Q7) features. */
4150 if (tdep
->have_q_pseudos
4151 && regnum
>= tdep
->q_pseudo_base
4152 && regnum
< (tdep
->q_pseudo_base
+ tdep
->q_pseudo_count
))
4158 /* Return true if REGNUM is a VFP S pseudo register. Return false
4161 REGNUM is the raw register number and not a pseudo-relative register
4165 is_s_pseudo (struct gdbarch
*gdbarch
, int regnum
)
4167 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4169 if (tdep
->have_s_pseudos
4170 && regnum
>= tdep
->s_pseudo_base
4171 && regnum
< (tdep
->s_pseudo_base
+ tdep
->s_pseudo_count
))
4177 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4180 REGNUM is the raw register number and not a pseudo-relative register
4184 is_mve_pseudo (struct gdbarch
*gdbarch
, int regnum
)
4186 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4189 && regnum
>= tdep
->mve_pseudo_base
4190 && regnum
< tdep
->mve_pseudo_base
+ tdep
->mve_pseudo_count
)
4196 /* Return the GDB type object for the "standard" data type of data in
4199 static struct type
*
4200 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4202 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4204 if (is_s_pseudo (gdbarch
, regnum
))
4205 return builtin_type (gdbarch
)->builtin_float
;
4207 if (is_q_pseudo (gdbarch
, regnum
))
4208 return arm_neon_quad_type (gdbarch
);
4210 if (is_mve_pseudo (gdbarch
, regnum
))
4211 return builtin_type (gdbarch
)->builtin_int16
;
4213 /* If the target description has register information, we are only
4214 in this function so that we can override the types of
4215 double-precision registers for NEON. */
4216 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4218 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4220 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4221 && t
->code () == TYPE_CODE_FLT
4223 return arm_neon_double_type (gdbarch
);
4228 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4230 if (!tdep
->have_fpa_registers
)
4231 return builtin_type (gdbarch
)->builtin_void
;
4233 return arm_ext_type (gdbarch
);
4235 else if (regnum
== ARM_SP_REGNUM
)
4236 return builtin_type (gdbarch
)->builtin_data_ptr
;
4237 else if (regnum
== ARM_PC_REGNUM
)
4238 return builtin_type (gdbarch
)->builtin_func_ptr
;
4239 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4240 /* These registers are only supported on targets which supply
4241 an XML description. */
4242 return builtin_type (gdbarch
)->builtin_int0
;
4244 return builtin_type (gdbarch
)->builtin_uint32
;
4247 /* Map a DWARF register REGNUM onto the appropriate GDB register
4251 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4253 /* Core integer regs. */
4254 if (reg
>= 0 && reg
<= 15)
4257 /* Legacy FPA encoding. These were once used in a way which
4258 overlapped with VFP register numbering, so their use is
4259 discouraged, but GDB doesn't support the ARM toolchain
4260 which used them for VFP. */
4261 if (reg
>= 16 && reg
<= 23)
4262 return ARM_F0_REGNUM
+ reg
- 16;
4264 /* New assignments for the FPA registers. */
4265 if (reg
>= 96 && reg
<= 103)
4266 return ARM_F0_REGNUM
+ reg
- 96;
4268 /* WMMX register assignments. */
4269 if (reg
>= 104 && reg
<= 111)
4270 return ARM_WCGR0_REGNUM
+ reg
- 104;
4272 if (reg
>= 112 && reg
<= 127)
4273 return ARM_WR0_REGNUM
+ reg
- 112;
4275 if (reg
>= 192 && reg
<= 199)
4276 return ARM_WC0_REGNUM
+ reg
- 192;
4278 /* VFP v2 registers. A double precision value is actually
4279 in d1 rather than s2, but the ABI only defines numbering
4280 for the single precision registers. This will "just work"
4281 in GDB for little endian targets (we'll read eight bytes,
4282 starting in s0 and then progressing to s1), but will be
4283 reversed on big endian targets with VFP. This won't
4284 be a problem for the new Neon quad registers; you're supposed
4285 to use DW_OP_piece for those. */
4286 if (reg
>= 64 && reg
<= 95)
4290 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4291 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4295 /* VFP v3 / Neon registers. This range is also used for VFP v2
4296 registers, except that it now describes d0 instead of s0. */
4297 if (reg
>= 256 && reg
<= 287)
4301 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4302 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4309 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4311 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4314 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4316 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4317 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4319 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4320 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4322 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4323 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4325 if (reg
< NUM_GREGS
)
4326 return SIM_ARM_R0_REGNUM
+ reg
;
4329 if (reg
< NUM_FREGS
)
4330 return SIM_ARM_FP0_REGNUM
+ reg
;
4333 if (reg
< NUM_SREGS
)
4334 return SIM_ARM_FPS_REGNUM
+ reg
;
4337 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4340 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4341 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4342 NULL if an error occurs. BUF is freed. */
4345 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
4346 int old_len
, int new_len
)
4349 int bytes_to_read
= new_len
- old_len
;
4351 new_buf
= (gdb_byte
*) xmalloc (new_len
);
4352 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
4354 if (target_read_code (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
4362 /* An IT block is at most the 2-byte IT instruction followed by
4363 four 4-byte instructions. The furthest back we must search to
4364 find an IT block that affects the current instruction is thus
4365 2 + 3 * 4 == 14 bytes. */
4366 #define MAX_IT_BLOCK_PREFIX 14
4368 /* Use a quick scan if there are more than this many bytes of
4370 #define IT_SCAN_THRESHOLD 32
4372 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4373 A breakpoint in an IT block may not be hit, depending on the
4376 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4380 CORE_ADDR boundary
, func_start
;
4382 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4383 int i
, any
, last_it
, last_it_count
;
4384 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
4386 /* If we are using BKPT breakpoints, none of this is necessary. */
4387 if (tdep
->thumb2_breakpoint
== NULL
)
4390 /* ARM mode does not have this problem. */
4391 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4394 /* We are setting a breakpoint in Thumb code that could potentially
4395 contain an IT block. The first step is to find how much Thumb
4396 code there is; we do not need to read outside of known Thumb
4398 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4400 /* Thumb-2 code must have mapping symbols to have a chance. */
4403 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4405 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4406 && func_start
> boundary
)
4407 boundary
= func_start
;
4409 /* Search for a candidate IT instruction. We have to do some fancy
4410 footwork to distinguish a real IT instruction from the second
4411 half of a 32-bit instruction, but there is no need for that if
4412 there's no candidate. */
4413 buf_len
= std::min (bpaddr
- boundary
, (CORE_ADDR
) MAX_IT_BLOCK_PREFIX
);
4415 /* No room for an IT instruction. */
4418 buf
= (gdb_byte
*) xmalloc (buf_len
);
4419 if (target_read_code (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4422 for (i
= 0; i
< buf_len
; i
+= 2)
4424 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4425 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4438 /* OK, the code bytes before this instruction contain at least one
4439 halfword which resembles an IT instruction. We know that it's
4440 Thumb code, but there are still two possibilities. Either the
4441 halfword really is an IT instruction, or it is the second half of
4442 a 32-bit Thumb instruction. The only way we can tell is to
4443 scan forwards from a known instruction boundary. */
4444 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
4448 /* There's a lot of code before this instruction. Start with an
4449 optimistic search; it's easy to recognize halfwords that can
4450 not be the start of a 32-bit instruction, and use that to
4451 lock on to the instruction boundaries. */
4452 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
4455 buf_len
= IT_SCAN_THRESHOLD
;
4458 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
4460 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4461 if (thumb_insn_size (inst1
) == 2)
4468 /* At this point, if DEFINITE, BUF[I] is the first place we
4469 are sure that we know the instruction boundaries, and it is far
4470 enough from BPADDR that we could not miss an IT instruction
4471 affecting BPADDR. If ! DEFINITE, give up - start from a
4475 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
4479 buf_len
= bpaddr
- boundary
;
4485 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
4488 buf_len
= bpaddr
- boundary
;
4492 /* Scan forwards. Find the last IT instruction before BPADDR. */
4497 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4499 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4504 else if (inst1
& 0x0002)
4506 else if (inst1
& 0x0004)
4511 i
+= thumb_insn_size (inst1
);
4517 /* There wasn't really an IT instruction after all. */
4520 if (last_it_count
< 1)
4521 /* It was too far away. */
4524 /* This really is a trouble spot. Move the breakpoint to the IT
4526 return bpaddr
- buf_len
+ last_it
;
4529 /* ARM displaced stepping support.
4531 Generally ARM displaced stepping works as follows:
4533 1. When an instruction is to be single-stepped, it is first decoded by
4534 arm_process_displaced_insn. Depending on the type of instruction, it is
4535 then copied to a scratch location, possibly in a modified form. The
4536 copy_* set of functions performs such modification, as necessary. A
4537 breakpoint is placed after the modified instruction in the scratch space
4538 to return control to GDB. Note in particular that instructions which
4539 modify the PC will no longer do so after modification.
4541 2. The instruction is single-stepped, by setting the PC to the scratch
4542 location address, and resuming. Control returns to GDB when the
4545 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4546 function used for the current instruction. This function's job is to
4547 put the CPU/memory state back to what it would have been if the
4548 instruction had been executed unmodified in its original location. */
4550 /* NOP instruction (mov r0, r0). */
4551 #define ARM_NOP 0xe1a00000
4552 #define THUMB_NOP 0x4600
4554 /* Helper for register reads for displaced stepping. In particular, this
4555 returns the PC as it would be seen by the instruction at its original
4559 displaced_read_reg (regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
,
4563 CORE_ADDR from
= dsc
->insn_addr
;
4565 if (regno
== ARM_PC_REGNUM
)
4567 /* Compute pipeline offset:
4568 - When executing an ARM instruction, PC reads as the address of the
4569 current instruction plus 8.
4570 - When executing a Thumb instruction, PC reads as the address of the
4571 current instruction plus 4. */
4578 displaced_debug_printf ("read pc value %.8lx",
4579 (unsigned long) from
);
4580 return (ULONGEST
) from
;
4584 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
4586 displaced_debug_printf ("read r%d value %.8lx",
4587 regno
, (unsigned long) ret
);
4594 displaced_in_arm_mode (struct regcache
*regs
)
4597 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4599 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4601 return (ps
& t_bit
) == 0;
4604 /* Write to the PC as from a branch instruction. */
4607 branch_write_pc (regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
,
4611 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4612 architecture versions < 6. */
4613 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4614 val
& ~(ULONGEST
) 0x3);
4616 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4617 val
& ~(ULONGEST
) 0x1);
4620 /* Write to the PC as from a branch-exchange instruction. */
4623 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
4626 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4628 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4632 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
4633 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
4635 else if ((val
& 2) == 0)
4637 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4638 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
4642 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4643 mode, align dest to 4 bytes). */
4644 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4645 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4646 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
4650 /* Write to the PC as if from a load instruction. */
4653 load_write_pc (regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
,
4656 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
4657 bx_write_pc (regs
, val
);
4659 branch_write_pc (regs
, dsc
, val
);
4662 /* Write to the PC as if from an ALU instruction. */
4665 alu_write_pc (regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
,
4668 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
4669 bx_write_pc (regs
, val
);
4671 branch_write_pc (regs
, dsc
, val
);
4674 /* Helper for writing to registers for displaced stepping. Writing to the PC
4675 has a varying effects depending on the instruction which does the write:
4676 this is controlled by the WRITE_PC argument. */
4679 displaced_write_reg (regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
,
4680 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
4682 if (regno
== ARM_PC_REGNUM
)
4684 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val
);
4688 case BRANCH_WRITE_PC
:
4689 branch_write_pc (regs
, dsc
, val
);
4693 bx_write_pc (regs
, val
);
4697 load_write_pc (regs
, dsc
, val
);
4701 alu_write_pc (regs
, dsc
, val
);
4704 case CANNOT_WRITE_PC
:
4705 warning (_("Instruction wrote to PC in an unexpected way when "
4706 "single-stepping"));
4710 internal_error (__FILE__
, __LINE__
,
4711 _("Invalid argument to displaced_write_reg"));
4714 dsc
->wrote_to_pc
= 1;
4718 displaced_debug_printf ("writing r%d value %.8lx",
4719 regno
, (unsigned long) val
);
4720 regcache_cooked_write_unsigned (regs
, regno
, val
);
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Advance to the least-significant set bit of BITMASK.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	break;

      /* The register field is the nibble starting at LOWBIT.  */
      mask = lowbit * 0xf;

      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
4756 /* The simplest copy function. Many instructions have the same effect no
4757 matter what address they are executed at: in those cases, use this. */
4760 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
, const char *iname
,
4761 arm_displaced_step_copy_insn_closure
*dsc
)
4763 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4764 (unsigned long) insn
, iname
);
4766 dsc
->modinsn
[0] = insn
;
4772 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
4773 uint16_t insn2
, const char *iname
,
4774 arm_displaced_step_copy_insn_closure
*dsc
)
4776 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4777 "unmodified", insn1
, insn2
, iname
);
4779 dsc
->modinsn
[0] = insn1
;
4780 dsc
->modinsn
[1] = insn2
;
4786 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4789 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
4791 arm_displaced_step_copy_insn_closure
*dsc
)
4793 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4796 dsc
->modinsn
[0] = insn
;
4801 /* Preload instructions with immediate offset. */
4804 cleanup_preload (struct gdbarch
*gdbarch
, regcache
*regs
,
4805 arm_displaced_step_copy_insn_closure
*dsc
)
4807 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4808 if (!dsc
->u
.preload
.immed
)
4809 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
4813 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4814 arm_displaced_step_copy_insn_closure
*dsc
, unsigned int rn
)
4817 /* Preload instructions:
4819 {pli/pld} [rn, #+/-imm]
4821 {pli/pld} [r0, #+/-imm]. */
4823 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4824 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4825 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4826 dsc
->u
.preload
.immed
= 1;
4828 dsc
->cleanup
= &cleanup_preload
;
4832 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
4833 arm_displaced_step_copy_insn_closure
*dsc
)
4835 unsigned int rn
= bits (insn
, 16, 19);
4837 if (!insn_references_pc (insn
, 0x000f0000ul
))
4838 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
4840 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn
);
4842 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4844 install_preload (gdbarch
, regs
, dsc
, rn
);
4850 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
4851 regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
)
4853 unsigned int rn
= bits (insn1
, 0, 3);
4854 unsigned int u_bit
= bit (insn1
, 7);
4855 int imm12
= bits (insn2
, 0, 11);
4858 if (rn
!= ARM_PC_REGNUM
)
4859 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
4861 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4862 PLD (literal) Encoding T1. */
4863 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4864 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
4870 /* Rewrite instruction {pli/pld} PC imm12 into:
4871 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4875 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4877 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4878 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4880 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
4882 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
4883 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
4884 dsc
->u
.preload
.immed
= 0;
4886 /* {pli/pld} [r0, r1] */
4887 dsc
->modinsn
[0] = insn1
& 0xfff0;
4888 dsc
->modinsn
[1] = 0xf001;
4891 dsc
->cleanup
= &cleanup_preload
;
4895 /* Preload instructions with register offset. */
4898 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
4899 arm_displaced_step_copy_insn_closure
*dsc
, unsigned int rn
,
4902 ULONGEST rn_val
, rm_val
;
4904 /* Preload register-offset instructions:
4906 {pli/pld} [rn, rm {, shift}]
4908 {pli/pld} [r0, r1 {, shift}]. */
4910 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4911 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4912 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4913 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
4914 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4915 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
4916 dsc
->u
.preload
.immed
= 0;
4918 dsc
->cleanup
= &cleanup_preload
;
4922 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
4923 struct regcache
*regs
,
4924 arm_displaced_step_copy_insn_closure
*dsc
)
4926 unsigned int rn
= bits (insn
, 16, 19);
4927 unsigned int rm
= bits (insn
, 0, 3);
4930 if (!insn_references_pc (insn
, 0x000f000ful
))
4931 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
4933 displaced_debug_printf ("copying preload insn %.8lx",
4934 (unsigned long) insn
);
4936 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
4938 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
4942 /* Copy/cleanup coprocessor load and store instructions. */
4945 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
4946 struct regcache
*regs
,
4947 arm_displaced_step_copy_insn_closure
*dsc
)
4949 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
4951 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4953 if (dsc
->u
.ldst
.writeback
)
4954 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
4958 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4959 arm_displaced_step_copy_insn_closure
*dsc
,
4960 int writeback
, unsigned int rn
)
4964 /* Coprocessor load/store instructions:
4966 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4968 {stc/stc2} [r0, #+/-imm].
4970 ldc/ldc2 are handled identically. */
4972 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4973 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4974 /* PC should be 4-byte aligned. */
4975 rn_val
= rn_val
& 0xfffffffc;
4976 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4978 dsc
->u
.ldst
.writeback
= writeback
;
4979 dsc
->u
.ldst
.rn
= rn
;
4981 dsc
->cleanup
= &cleanup_copro_load_store
;
4985 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
4986 struct regcache
*regs
,
4987 arm_displaced_step_copy_insn_closure
*dsc
)
4989 unsigned int rn
= bits (insn
, 16, 19);
4991 if (!insn_references_pc (insn
, 0x000f0000ul
))
4992 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
4994 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4995 (unsigned long) insn
);
4997 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4999 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
5005 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
5006 uint16_t insn2
, struct regcache
*regs
,
5007 arm_displaced_step_copy_insn_closure
*dsc
)
5009 unsigned int rn
= bits (insn1
, 0, 3);
5011 if (rn
!= ARM_PC_REGNUM
)
5012 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
5013 "copro load/store", dsc
);
5015 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5018 dsc
->modinsn
[0] = insn1
& 0xfff0;
5019 dsc
->modinsn
[1] = insn2
;
5022 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5023 doesn't support writeback, so pass 0. */
5024 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
5029 /* Clean up branch instructions (actually perform the branch, by setting
5033 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5034 arm_displaced_step_copy_insn_closure
*dsc
)
5036 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5037 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
5038 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
5039 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
5044 if (dsc
->u
.branch
.link
)
5046 /* The value of LR should be the next insn of current one. In order
5047 not to confuse logic handling later insn `bx lr', if current insn mode
5048 is Thumb, the bit 0 of LR value should be set to 1. */
5049 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
5052 next_insn_addr
|= 0x1;
5054 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
5058 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
5061 /* Copy B/BL/BLX instructions with immediate destinations. */
5064 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5065 arm_displaced_step_copy_insn_closure
*dsc
,
5066 unsigned int cond
, int exchange
, int link
, long offset
)
5068 /* Implement "BL<cond> <label>" as:
5070 Preparation: cond <- instruction condition
5071 Insn: mov r0, r0 (nop)
5072 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5074 B<cond> similar, but don't set r14 in cleanup. */
5076 dsc
->u
.branch
.cond
= cond
;
5077 dsc
->u
.branch
.link
= link
;
5078 dsc
->u
.branch
.exchange
= exchange
;
5080 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
5081 if (link
&& exchange
)
5082 /* For BLX, offset is computed from the Align (PC, 4). */
5083 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
5086 dsc
->u
.branch
.dest
+= 4 + offset
;
5088 dsc
->u
.branch
.dest
+= 8 + offset
;
5090 dsc
->cleanup
= &cleanup_branch
;
5093 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
5094 regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
)
5096 unsigned int cond
= bits (insn
, 28, 31);
5097 int exchange
= (cond
== 0xf);
5098 int link
= exchange
|| bit (insn
, 24);
5101 displaced_debug_printf ("copying %s immediate insn %.8lx",
5102 (exchange
) ? "blx" : (link
) ? "bl" : "b",
5103 (unsigned long) insn
);
5105 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5106 then arrange the switch into Thumb mode. */
5107 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
5109 offset
= bits (insn
, 0, 23) << 2;
5111 if (bit (offset
, 25))
5112 offset
= offset
| ~0x3ffffff;
5114 dsc
->modinsn
[0] = ARM_NOP
;
5116 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5121 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
5122 uint16_t insn2
, struct regcache
*regs
,
5123 arm_displaced_step_copy_insn_closure
*dsc
)
5125 int link
= bit (insn2
, 14);
5126 int exchange
= link
&& !bit (insn2
, 12);
5129 int j1
= bit (insn2
, 13);
5130 int j2
= bit (insn2
, 11);
5131 int s
= sbits (insn1
, 10, 10);
5132 int i1
= !(j1
^ bit (insn1
, 10));
5133 int i2
= !(j2
^ bit (insn1
, 10));
5135 if (!link
&& !exchange
) /* B */
5137 offset
= (bits (insn2
, 0, 10) << 1);
5138 if (bit (insn2
, 12)) /* Encoding T4 */
5140 offset
|= (bits (insn1
, 0, 9) << 12)
5146 else /* Encoding T3 */
5148 offset
|= (bits (insn1
, 0, 5) << 12)
5152 cond
= bits (insn1
, 6, 9);
5157 offset
= (bits (insn1
, 0, 9) << 12);
5158 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
5159 offset
|= exchange
?
5160 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
5163 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5164 link
? (exchange
) ? "blx" : "bl" : "b",
5165 insn1
, insn2
, offset
);
5167 dsc
->modinsn
[0] = THUMB_NOP
;
5169 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5173 /* Copy B Thumb instructions. */
5175 thumb_copy_b (struct gdbarch
*gdbarch
, uint16_t insn
,
5176 arm_displaced_step_copy_insn_closure
*dsc
)
5178 unsigned int cond
= 0;
5180 unsigned short bit_12_15
= bits (insn
, 12, 15);
5181 CORE_ADDR from
= dsc
->insn_addr
;
5183 if (bit_12_15
== 0xd)
5185 /* offset = SignExtend (imm8:0, 32) */
5186 offset
= sbits ((insn
<< 1), 0, 8);
5187 cond
= bits (insn
, 8, 11);
5189 else if (bit_12_15
== 0xe) /* Encoding T2 */
5191 offset
= sbits ((insn
<< 1), 0, 11);
5195 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5198 dsc
->u
.branch
.cond
= cond
;
5199 dsc
->u
.branch
.link
= 0;
5200 dsc
->u
.branch
.exchange
= 0;
5201 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
5203 dsc
->modinsn
[0] = THUMB_NOP
;
5205 dsc
->cleanup
= &cleanup_branch
;
5210 /* Copy BX/BLX with register-specified destinations. */
5213 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5214 arm_displaced_step_copy_insn_closure
*dsc
, int link
,
5215 unsigned int cond
, unsigned int rm
)
5217 /* Implement {BX,BLX}<cond> <reg>" as:
5219 Preparation: cond <- instruction condition
5220 Insn: mov r0, r0 (nop)
5221 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5223 Don't set r14 in cleanup for BX. */
5225 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5227 dsc
->u
.branch
.cond
= cond
;
5228 dsc
->u
.branch
.link
= link
;
5230 dsc
->u
.branch
.exchange
= 1;
5232 dsc
->cleanup
= &cleanup_branch
;
5236 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5237 regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
)
5239 unsigned int cond
= bits (insn
, 28, 31);
5242 int link
= bit (insn
, 5);
5243 unsigned int rm
= bits (insn
, 0, 3);
5245 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn
);
5247 dsc
->modinsn
[0] = ARM_NOP
;
5249 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5254 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5255 struct regcache
*regs
,
5256 arm_displaced_step_copy_insn_closure
*dsc
)
5258 int link
= bit (insn
, 7);
5259 unsigned int rm
= bits (insn
, 3, 6);
5261 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn
);
5263 dsc
->modinsn
[0] = THUMB_NOP
;
5265 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5271 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5274 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5275 regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
)
5277 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5278 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5279 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5280 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5284 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5285 arm_displaced_step_copy_insn_closure
*dsc
)
5287 unsigned int rn
= bits (insn
, 16, 19);
5288 unsigned int rd
= bits (insn
, 12, 15);
5289 unsigned int op
= bits (insn
, 21, 24);
5290 int is_mov
= (op
== 0xd);
5291 ULONGEST rd_val
, rn_val
;
5293 if (!insn_references_pc (insn
, 0x000ff000ul
))
5294 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5296 displaced_debug_printf ("copying immediate %s insn %.8lx",
5297 is_mov
? "move" : "ALU",
5298 (unsigned long) insn
);
5300 /* Instruction is of form:
5302 <op><cond> rd, [rn,] #imm
5306 Preparation: tmp1, tmp2 <- r0, r1;
5308 Insn: <op><cond> r0, r1, #imm
5309 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5312 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5313 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5314 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5315 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5316 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5317 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5321 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5323 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5325 dsc
->cleanup
= &cleanup_alu_imm
;
5331 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5332 uint16_t insn2
, struct regcache
*regs
,
5333 arm_displaced_step_copy_insn_closure
*dsc
)
5335 unsigned int op
= bits (insn1
, 5, 8);
5336 unsigned int rn
, rm
, rd
;
5337 ULONGEST rd_val
, rn_val
;
5339 rn
= bits (insn1
, 0, 3); /* Rn */
5340 rm
= bits (insn2
, 0, 3); /* Rm */
5341 rd
= bits (insn2
, 8, 11); /* Rd */
5343 /* This routine is only called for instruction MOV. */
5344 gdb_assert (op
== 0x2 && rn
== 0xf);
5346 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5347 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5349 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1
, insn2
);
5351 /* Instruction is of form:
5353 <op><cond> rd, [rn,] #imm
5357 Preparation: tmp1, tmp2 <- r0, r1;
5359 Insn: <op><cond> r0, r1, #imm
5360 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5363 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5364 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5365 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5366 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5367 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5368 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5371 dsc
->modinsn
[0] = insn1
;
5372 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5375 dsc
->cleanup
= &cleanup_alu_imm
;
5380 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5383 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5384 regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
)
5389 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5391 for (i
= 0; i
< 3; i
++)
5392 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5394 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5398 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5399 arm_displaced_step_copy_insn_closure
*dsc
,
5400 unsigned int rd
, unsigned int rn
, unsigned int rm
)
5402 ULONGEST rd_val
, rn_val
, rm_val
;
5404 /* Instruction is of form:
5406 <op><cond> rd, [rn,] rm [, <shift>]
5410 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5411 r0, r1, r2 <- rd, rn, rm
5412 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5413 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5416 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5417 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5418 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5419 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5420 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5421 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5422 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5423 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5424 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5427 dsc
->cleanup
= &cleanup_alu_reg
;
5431 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5432 arm_displaced_step_copy_insn_closure
*dsc
)
5434 unsigned int op
= bits (insn
, 21, 24);
5435 int is_mov
= (op
== 0xd);
5437 if (!insn_references_pc (insn
, 0x000ff00ful
))
5438 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
5440 displaced_debug_printf ("copying reg %s insn %.8lx",
5441 is_mov
? "move" : "ALU", (unsigned long) insn
);
5444 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
5446 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
5448 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
5454 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5455 struct regcache
*regs
,
5456 arm_displaced_step_copy_insn_closure
*dsc
)
5460 rm
= bits (insn
, 3, 6);
5461 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
5463 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
5464 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
5466 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn
);
5468 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
5470 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
5475 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5478 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
5479 struct regcache
*regs
,
5480 arm_displaced_step_copy_insn_closure
*dsc
)
5482 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5485 for (i
= 0; i
< 4; i
++)
5486 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5488 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5492 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5493 arm_displaced_step_copy_insn_closure
*dsc
,
5494 unsigned int rd
, unsigned int rn
, unsigned int rm
,
5498 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
5500 /* Instruction is of form:
5502 <op><cond> rd, [rn,] rm, <shift> rs
5506 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5507 r0, r1, r2, r3 <- rd, rn, rm, rs
5508 Insn: <op><cond> r0, r1, r2, <shift> r3
5510 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5514 for (i
= 0; i
< 4; i
++)
5515 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5517 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5518 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5519 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5520 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
5521 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5522 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5523 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5524 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
5526 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
5530 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5531 struct regcache
*regs
,
5532 arm_displaced_step_copy_insn_closure
*dsc
)
5534 unsigned int op
= bits (insn
, 21, 24);
5535 int is_mov
= (op
== 0xd);
5536 unsigned int rd
, rn
, rm
, rs
;
5538 if (!insn_references_pc (insn
, 0x000fff0ful
))
5539 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
5541 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5542 is_mov
? "move" : "ALU",
5543 (unsigned long) insn
);
5545 rn
= bits (insn
, 16, 19);
5546 rm
= bits (insn
, 0, 3);
5547 rs
= bits (insn
, 8, 11);
5548 rd
= bits (insn
, 12, 15);
5551 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
5553 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
5555 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
5560 /* Clean up load instructions. */
5563 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5564 arm_displaced_step_copy_insn_closure
*dsc
)
5566 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
5568 rt_val
= displaced_read_reg (regs
, dsc
, 0);
5569 if (dsc
->u
.ldst
.xfersize
== 8)
5570 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
5571 rn_val
= displaced_read_reg (regs
, dsc
, 2);
5573 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5574 if (dsc
->u
.ldst
.xfersize
> 4)
5575 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5576 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5577 if (!dsc
->u
.ldst
.immed
)
5578 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5580 /* Handle register writeback. */
5581 if (dsc
->u
.ldst
.writeback
)
5582 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5583 /* Put result in right place. */
5584 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
5585 if (dsc
->u
.ldst
.xfersize
== 8)
5586 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
5589 /* Clean up store instructions. */
5592 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5593 arm_displaced_step_copy_insn_closure
*dsc
)
5595 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
5597 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5598 if (dsc
->u
.ldst
.xfersize
> 4)
5599 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5600 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5601 if (!dsc
->u
.ldst
.immed
)
5602 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5603 if (!dsc
->u
.ldst
.restore_r4
)
5604 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
5607 if (dsc
->u
.ldst
.writeback
)
5608 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5611 /* Copy "extra" load/store instructions. These are halfword/doubleword
5612 transfers, which have a different encoding to byte/word transfers. */
5615 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unprivileged
,
5616 regcache
*regs
, arm_displaced_step_copy_insn_closure
*dsc
)
5618 unsigned int op1
= bits (insn
, 20, 24);
5619 unsigned int op2
= bits (insn
, 5, 6);
5620 unsigned int rt
= bits (insn
, 12, 15);
5621 unsigned int rn
= bits (insn
, 16, 19);
5622 unsigned int rm
= bits (insn
, 0, 3);
5623 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5624 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5625 int immed
= (op1
& 0x4) != 0;
5627 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
5629 if (!insn_references_pc (insn
, 0x000ff00ful
))
5630 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
5632 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5633 unprivileged
? "unprivileged " : "",
5634 (unsigned long) insn
);
5636 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
5639 internal_error (__FILE__
, __LINE__
,
5640 _("copy_extra_ld_st: instruction decode error"));
5642 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5643 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5644 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5646 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5648 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5649 if (bytesize
[opcode
] == 8)
5650 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
5651 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5653 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5655 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5656 if (bytesize
[opcode
] == 8)
5657 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
5658 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5660 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5663 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
5664 dsc
->u
.ldst
.rn
= rn
;
5665 dsc
->u
.ldst
.immed
= immed
;
5666 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
5667 dsc
->u
.ldst
.restore_r4
= 0;
5670 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5672 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5673 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5675 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5677 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5678 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5680 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
5685 /* Copy byte/half word/word loads and stores. */
5688 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5689 arm_displaced_step_copy_insn_closure
*dsc
, int load
,
5690 int immed
, int writeback
, int size
, int usermode
,
5691 int rt
, int rm
, int rn
)
5693 ULONGEST rt_val
, rn_val
, rm_val
= 0;
5695 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5696 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5698 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5700 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
5702 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5703 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5705 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5707 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5708 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5710 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5712 dsc
->u
.ldst
.xfersize
= size
;
5713 dsc
->u
.ldst
.rn
= rn
;
5714 dsc
->u
.ldst
.immed
= immed
;
5715 dsc
->u
.ldst
.writeback
= writeback
;
5717 /* To write PC we can do:
5719 Before this sequence of instructions:
5720 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5721 r2 is the Rn value got from displaced_read_reg.
5723 Insn1: push {pc} Write address of STR instruction + offset on stack
5724 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5725 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5726 = addr(Insn1) + offset - addr(Insn3) - 8
5728 Insn4: add r4, r4, #8 r4 = offset - 8
5729 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5731 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5733 Otherwise we don't know what value to write for PC, since the offset is
5734 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5735 of this can be found in Section "Saving from r15" in
5736 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5738 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5743 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
5744 uint16_t insn2
, struct regcache
*regs
,
5745 arm_displaced_step_copy_insn_closure
*dsc
, int size
)
5747 unsigned int u_bit
= bit (insn1
, 7);
5748 unsigned int rt
= bits (insn2
, 12, 15);
5749 int imm12
= bits (insn2
, 0, 11);
5752 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5753 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
5759 /* Rewrite instruction LDR Rt imm12 into:
5761 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5765 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5768 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5769 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5770 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5772 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5774 pc_val
= pc_val
& 0xfffffffc;
5776 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
5777 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
5781 dsc
->u
.ldst
.xfersize
= size
;
5782 dsc
->u
.ldst
.immed
= 0;
5783 dsc
->u
.ldst
.writeback
= 0;
5784 dsc
->u
.ldst
.restore_r4
= 0;
5786 /* LDR R0, R2, R3 */
5787 dsc
->modinsn
[0] = 0xf852;
5788 dsc
->modinsn
[1] = 0x3;
5791 dsc
->cleanup
= &cleanup_load
;
5797 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5798 uint16_t insn2
, struct regcache
*regs
,
5799 arm_displaced_step_copy_insn_closure
*dsc
,
5800 int writeback
, int immed
)
5802 unsigned int rt
= bits (insn2
, 12, 15);
5803 unsigned int rn
= bits (insn1
, 0, 3);
5804 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
5805 /* In LDR (register), there is also a register Rm, which is not allowed to
5806 be PC, so we don't have to check it. */
5808 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
5809 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
5812 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5813 rt
, rn
, insn1
, insn2
);
5815 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
5818 dsc
->u
.ldst
.restore_r4
= 0;
5821 /* ldr[b]<cond> rt, [rn, #imm], etc.
5823 ldr[b]<cond> r0, [r2, #imm]. */
5825 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5826 dsc
->modinsn
[1] = insn2
& 0x0fff;
5829 /* ldr[b]<cond> rt, [rn, rm], etc.
5831 ldr[b]<cond> r0, [r2, r3]. */
5833 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5834 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
5844 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
5845 struct regcache
*regs
,
5846 arm_displaced_step_copy_insn_closure
*dsc
,
5847 int load
, int size
, int usermode
)
5849 int immed
= !bit (insn
, 25);
5850 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
5851 unsigned int rt
= bits (insn
, 12, 15);
5852 unsigned int rn
= bits (insn
, 16, 19);
5853 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
5855 if (!insn_references_pc (insn
, 0x000ff00ful
))
5856 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
5858 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5859 load
? (size
== 1 ? "ldrb" : "ldr")
5860 : (size
== 1 ? "strb" : "str"),
5861 usermode
? "t" : "",
5863 (unsigned long) insn
);
5865 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
5866 usermode
, rt
, rm
, rn
);
5868 if (load
|| rt
!= ARM_PC_REGNUM
)
5870 dsc
->u
.ldst
.restore_r4
= 0;
5873 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5875 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5876 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5878 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5880 {ldr,str}[b]<cond> r0, [r2, r3]. */
5881 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5885 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5886 dsc
->u
.ldst
.restore_r4
= 1;
5887 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
5888 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
5889 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
5890 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
5891 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
5895 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
5897 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
5902 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-index bump applied before each word, post-index bump after.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  int regno = inc ? 0 : 15;	/* Walk registers low-to-high or high-to-low.  */
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  displaced_debug_printf ("emulating block transfer: %s %s %s",
			  dsc->u.block.load ? "ldm" : "stm",
			  dsc->u.block.increment ? "inc" : "dec",
			  dsc->u.block.before ? "before" : "after");

  /* Emulate the transfer one register at a time, in address order.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register named in the (remaining) mask.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback if the original instruction asked
     for it.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
/* Clean up an STM which included the PC in the register list.  The copied
   instruction ran as-is out of line, so it stored a PC value relative to the
   scratch area; rewrite that memory word to the value the non-displaced
   original would have stored.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs
    = count_one_bits (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Locate the slot the PC was stored into.  STM stores the lowest-numbered
     register at the lowest address, so the PC (r15) occupies the highest
     address of the transfer.
     NOTE(review): the before/after +-4 adjustments below were hidden in the
     extraction and are reconstructed — confirm against upstream arm-tdep.c.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  /* Auto-detect the architecture-dependent offset (e.g. +8 or +12) that
     the store applied to the PC, by comparing against the scratch copy's
     address.  */
  offset = pc_val - stm_insn_addr;

  displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
			  offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_copy_insn_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = count_one_bits (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* r0..r(num_to_shuffle-1) were overwritten by the modified LDM.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downward, moving each value
     from its temporary slot (the top of the contiguous r0..rX chunk) into
     the register the original instruction targeted.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      displaced_debug_printf ("LDM: move loaded register r%d to r%d",
				      read_reg, write_reg);
	    }
	  else
	    displaced_debug_printf ("LDM: register r%d already in the right "
				    "place", write_reg);

	  /* This register now holds its final value, not scratch.  */
	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  displaced_debug_printf ("LDM: restored clobbered register r%d",
				  write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_copy_insn_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  displaced_debug_printf ("copying block transfer insn %.8lx",
			  (unsigned long) insn);

  /* Record everything the cleanup routines need to emulate/fix up the
     transfer.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = count_one_bits (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(n-1) so the cleanup routine can restore whichever of
	     them the contiguous-chunk trick clobbers.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.
	     NOTE(review): the writeback-bit clearing statement was hidden in
	     the extraction and is reconstructed.  */
	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
				  "%.4x, modified list %.4x",
				  rn, writeback ? "!" : "",
				  (int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
/* Thumb-2 counterpart of arm_copy_block_xfer: copy a 32-bit Thumb LDM/STM
   for displaced stepping.  Same strategy — contiguous-chunk rewrite for LDM
   including PC, run-as-is plus memory fixup for STM including PC.  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  displaced_debug_printf ("copying block transfer insn %.4x%.4x",
			  insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = 0;	/* No user-mode form in Thumb-2.  */
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;	/* Thumb-2 LDM/STM is unconditional here.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  /* LDM of a list of registers which includes PC: rewrite into a
	     contiguous chunk r0...rX; the cleanup routine shuffles values
	     into place afterwards.  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = count_one_bits (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(n-1) so the cleanup routine can restore clobbers.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Turn off writeback; it is emulated manually in the cleanup
	     routine (same reasoning as the ARM-mode version).
	     NOTE(review): reconstructed — statement hidden in extraction.  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
				  "%.4x, modified list %.4x",
				  rn, writeback ? "!" : "",
				  (int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including PC: run as-is out of line, then fix up the stored PC
	 word in memory in the cleanup routine.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
   This is used to avoid a dependency on BFD's bfd_endian enum.  */

static ULONGEST
arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
					       int byte_order)
{
  /* BYTE_ORDER arrives as a plain int; convert back to bfd_endian for the
     core memory-reading routine.  */
  return read_memory_unsigned_integer (memaddr, len,
				       (enum bfd_endian) byte_order);
}
/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs.
   Strips non-address bits (e.g. the Thumb bit) from VAL.  */

static CORE_ADDR
arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
				   CORE_ADDR val)
{
  return gdbarch_addr_bits_remove (self->regcache->arch (), val);
}
/* Wrapper over syscall_next_pc for use in get_next_pcs.  */

static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  /* NOTE(review): body hidden in the extraction.  In bare-metal arm-tdep the
     generic hook has no syscall knowledge and returns 0 (OS-specific
     tdeps override it) — confirm against upstream arm-tdep.c.  */
  return 0;
}
/* Wrapper over arm_is_thumb for use in arm_get_next_pcs.  Returns non-zero
   when the inferior is currently executing in Thumb mode.  */

static int
arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
{
  return arm_is_thumb (self->regcache);
}
/* single_step() is called just before we want to resume the inferior,
   if we want to single-step it but there is no hardware or kernel
   single-step support.  We find the target of the coming instructions
   and breakpoint them.  */

std::vector<CORE_ADDR>
arm_software_single_step (struct regcache *regcache)
{
  struct gdbarch *gdbarch = regcache->arch ();
  struct arm_get_next_pcs next_pcs_ctx;

  /* NOTE(review): the trailing ctor arguments were hidden in the extraction;
     reconstructed as (has_thumb2_breakpoint = 0, regcache) — confirm against
     arm_get_next_pcs_ctor's declaration in arch/arm-get-next-pcs.h.  */
  arm_get_next_pcs_ctor (&next_pcs_ctx,
			 &arm_get_next_pcs_ops,
			 gdbarch_byte_order (gdbarch),
			 gdbarch_byte_order_for_code (gdbarch),
			 0,
			 regcache);

  std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

  /* Strip the Thumb bit and other non-address bits before planting
     breakpoints.  */
  for (CORE_ADDR &pc_ref : next_pcs)
    pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);

  return next_pcs;
}
/* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
   for Linux, where some SVC instructions must be treated specially.  */

static void
cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
	     arm_displaced_step_copy_insn_closure *dsc)
{
  /* Resume at the instruction following the original SVC.  */
  CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;

  displaced_debug_printf ("cleanup for svc, resume at %.8lx",
			  (unsigned long) resume_addr);

  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
}
/* Common copy routine for svc instruction.  */

static int
install_svc (struct gdbarch *gdbarch, struct regcache *regs,
	     arm_displaced_step_copy_insn_closure *dsc)
{
  /* Preparation: none.
     Insn: unmodified svc.
     Cleanup: pc <- insn_addr + insn_size.  */

  /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
     instruction.  */
  dsc->wrote_to_pc = 1;

  /* Allow OS-specific code to override SVC handling.  */
  if (dsc->u.svc.copy_svc_os)
    return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
  else
    {
      dsc->cleanup = &cleanup_svc;
      return 0;
    }
}
/* Copy an ARM-mode SVC instruction for displaced stepping: run it
   unmodified, with the common install_svc preparation/cleanup.  */

static int
arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
	      regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  displaced_debug_printf ("copying svc insn %.8lx",
			  (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
/* Copy a 16-bit Thumb SVC instruction for displaced stepping: run it
   unmodified, with the common install_svc preparation/cleanup.  */

static int
thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
		regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  displaced_debug_printf ("copying svc insn %.4x", insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
/* Copy undefined instructions.  Executed as-is out of line so the expected
   undefined-instruction exception is raised at the displaced location.  */

static int
arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
		arm_displaced_step_copy_insn_closure *dsc)
{
  displaced_debug_printf ("copying undefined insn %.8lx",
			  (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
/* Copy an undefined 32-bit Thumb-2 instruction (both halfwords) as-is.  */

static int
thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			arm_displaced_step_copy_insn_closure *dsc)
{
  displaced_debug_printf ("copying undefined insn %.4x %.4x",
			  (unsigned short) insn1, (unsigned short) insn2);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  /* NOTE(review): instruction count reconstructed (hidden in the
     extraction) — confirm against upstream arm-tdep.c.  */
  dsc->numinsns = 2;

  return 0;
}
/* Copy unpredictable instructions.  Executed as-is out of line; whatever the
   hardware does is what the non-displaced instruction would have done.  */

static int
arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
		 arm_displaced_step_copy_insn_closure *dsc)
{
  displaced_debug_printf ("copying unpredictable insn %.8lx",
			  (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* NOTE(review): this rn guard was hidden in the extraction and is
	 reconstructed (pld with rn == pc is unpredictable).  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the ARM "unconditional" (cond == 0b1111) instruction space and
   dispatch to the appropriate displaced-stepping copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    /* NOTE(review): the case labels and bit-4 tests for the coprocessor
       register-transfer rows below were partially hidden in the extraction
       and are reconstructed from the visible returns.  */
    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the ARM data-processing / miscellaneous instruction space and
   dispatch to the appropriate displaced-stepping copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_copy_insn_closure *dsc)
{
  if (bit (insn, 25))		/* Immediate-operand forms.  */
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM load/store word and unsigned byte space.  Bit A (25)
   selects the register-offset form, bit B (4) disambiguates the media
   space; the opcode bits pick load vs store, word vs byte, and the
   unprivileged ("t") form.  The trailing 3 ints passed to
   arm_copy_ldr_str_ldrb_strb are (load, size, usermode).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_copy_insn_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM media instruction space.  All of these are data-processing
   instructions that never reference the PC, so they are copied unmodified
   (or flagged undefined).  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_copy_insn_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM branch / block-transfer space: bit 25 distinguishes
   b/bl/blx from ldm/stm.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
			struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode the ARM VFP/Neon extension-register load/store space.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    arm_displaced_step_copy_insn_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf)  /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      /* NOTE(review): the "vstr" tag string below was hidden in the
	 extraction and is reconstructed.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM SVC / coprocessor instruction space and dispatch to the
   appropriate displaced-stepping copy routine.  Coprocessor numbers 0b101x
   denote VFP/Neon and are handled separately.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
/* Decode the Thumb-2 coprocessor / SIMD instruction space and dispatch to
   the appropriate displaced-stepping copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  /* NOTE(review): the outer bit_9/bit_5_8 branching below was partially
     hidden in the extraction and is reconstructed from the visible
     returns.  */
  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
}
7001 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7002 arm_displaced_step_copy_insn_closure
*dsc
, int rd
)
7008 Preparation: Rd <- PC
7014 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7015 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
7019 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7020 arm_displaced_step_copy_insn_closure
*dsc
,
7021 int rd
, unsigned int imm
)
7024 /* Encoding T2: ADDS Rd, #imm */
7025 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
7027 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
7033 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
7034 struct regcache
*regs
,
7035 arm_displaced_step_copy_insn_closure
*dsc
)
7037 unsigned int rd
= bits (insn
, 8, 10);
7038 unsigned int imm8
= bits (insn
, 0, 7);
7040 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7043 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
7047 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7048 uint16_t insn2
, struct regcache
*regs
,
7049 arm_displaced_step_copy_insn_closure
*dsc
)
7051 unsigned int rd
= bits (insn2
, 8, 11);
7052 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
7053 extract raw immediate encoding rather than computing immediate. When
7054 generating ADD or SUB instruction, we can simply perform OR operation to
7055 set immediate into ADD. */
7056 unsigned int imm_3_8
= insn2
& 0x70ff;
7057 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
7059 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7060 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
7062 if (bit (insn1
, 7)) /* Encoding T2 */
7064 /* Encoding T3: SUB Rd, Rd, #imm */
7065 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
7066 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
7068 else /* Encoding T3 */
7070 /* Encoding T3: ADD Rd, Rd, #imm */
7071 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
7072 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
7076 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
7082 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
7083 struct regcache
*regs
,
7084 arm_displaced_step_copy_insn_closure
*dsc
)
7086 unsigned int rt
= bits (insn1
, 8, 10);
7088 int imm8
= (bits (insn1
, 0, 7) << 2);
7094 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7096 Insn: LDR R0, [R2, R3];
7097 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7099 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt
, imm8
);
7101 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
7102 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
7103 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
7104 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7105 /* The assembler calculates the required value of the offset from the
7106 Align(PC,4) value of this instruction to the label. */
7107 pc
= pc
& 0xfffffffc;
7109 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
7110 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
7113 dsc
->u
.ldst
.xfersize
= 4;
7115 dsc
->u
.ldst
.immed
= 0;
7116 dsc
->u
.ldst
.writeback
= 0;
7117 dsc
->u
.ldst
.restore_r4
= 0;
7119 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7121 dsc
->cleanup
= &cleanup_load
;
7126 /* Copy Thumb cbnz/cbz instruction. */
7129 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
7130 struct regcache
*regs
,
7131 arm_displaced_step_copy_insn_closure
*dsc
)
7133 int non_zero
= bit (insn1
, 11);
7134 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
7135 CORE_ADDR from
= dsc
->insn_addr
;
7136 int rn
= bits (insn1
, 0, 2);
7137 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
7139 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
7140 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7141 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7142 condition is false, let it be, cleanup_branch will do nothing. */
7143 if (dsc
->u
.branch
.cond
)
7145 dsc
->u
.branch
.cond
= INST_AL
;
7146 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
7149 dsc
->u
.branch
.dest
= from
+ 2;
7151 dsc
->u
.branch
.link
= 0;
7152 dsc
->u
.branch
.exchange
= 0;
7154 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7155 non_zero
? "cbnz" : "cbz",
7156 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
7158 dsc
->modinsn
[0] = THUMB_NOP
;
7160 dsc
->cleanup
= &cleanup_branch
;
7164 /* Copy Table Branch Byte/Halfword */
7166 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
7167 uint16_t insn2
, struct regcache
*regs
,
7168 arm_displaced_step_copy_insn_closure
*dsc
)
7170 ULONGEST rn_val
, rm_val
;
7171 int is_tbh
= bit (insn2
, 4);
7172 CORE_ADDR halfwords
= 0;
7173 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7175 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
7176 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
7182 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
7183 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
7189 target_read_memory (rn_val
+ rm_val
, buf
, 1);
7190 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
7193 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
7194 is_tbh
? "tbh" : "tbb",
7195 (unsigned int) rn_val
, (unsigned int) rm_val
,
7196 (unsigned int) halfwords
);
7198 dsc
->u
.branch
.cond
= INST_AL
;
7199 dsc
->u
.branch
.link
= 0;
7200 dsc
->u
.branch
.exchange
= 0;
7201 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
7203 dsc
->cleanup
= &cleanup_branch
;
7209 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7210 arm_displaced_step_copy_insn_closure
*dsc
)
7213 int val
= displaced_read_reg (regs
, dsc
, 7);
7214 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
7217 val
= displaced_read_reg (regs
, dsc
, 8);
7218 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
7221 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
7226 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7227 struct regcache
*regs
,
7228 arm_displaced_step_copy_insn_closure
*dsc
)
7230 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
7232 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7235 (1) register list is full, that is, r0-r7 are used.
7236 Prepare: tmp[0] <- r8
7238 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7239 MOV r8, r7; Move value of r7 to r8;
7240 POP {r7}; Store PC value into r7.
7242 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7244 (2) register list is not full, supposing there are N registers in
7245 register list (except PC, 0 <= N <= 7).
7246 Prepare: for each i, 0 - N, tmp[i] <- ri.
7248 POP {r0, r1, ...., rN};
7250 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7251 from tmp[] properly.
7253 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7254 dsc
->u
.block
.regmask
, insn1
);
7256 if (dsc
->u
.block
.regmask
== 0xff)
7258 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
7260 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
7261 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
7262 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
7265 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
7269 unsigned int num_in_list
= count_one_bits (dsc
->u
.block
.regmask
);
7271 unsigned int new_regmask
;
7273 for (i
= 0; i
< num_in_list
+ 1; i
++)
7274 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7276 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
7278 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7279 "modified list %.4x",
7280 (int) dsc
->u
.block
.regmask
, new_regmask
);
7282 dsc
->u
.block
.regmask
|= 0x8000;
7283 dsc
->u
.block
.writeback
= 0;
7284 dsc
->u
.block
.cond
= INST_AL
;
7286 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
7288 dsc
->cleanup
= &cleanup_block_load_pc
;
7295 thumb_process_displaced_16bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7296 struct regcache
*regs
,
7297 arm_displaced_step_copy_insn_closure
*dsc
)
7299 unsigned short op_bit_12_15
= bits (insn1
, 12, 15);
7300 unsigned short op_bit_10_11
= bits (insn1
, 10, 11);
7303 /* 16-bit thumb instructions. */
7304 switch (op_bit_12_15
)
7306 /* Shift (imme), add, subtract, move and compare. */
7307 case 0: case 1: case 2: case 3:
7308 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7309 "shift/add/sub/mov/cmp",
7313 switch (op_bit_10_11
)
7315 case 0: /* Data-processing */
7316 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7320 case 1: /* Special data instructions and branch and exchange. */
7322 unsigned short op
= bits (insn1
, 7, 9);
7323 if (op
== 6 || op
== 7) /* BX or BLX */
7324 err
= thumb_copy_bx_blx_reg (gdbarch
, insn1
, regs
, dsc
);
7325 else if (bits (insn1
, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7326 err
= thumb_copy_alu_reg (gdbarch
, insn1
, regs
, dsc
);
7328 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "special data",
7332 default: /* LDR (literal) */
7333 err
= thumb_copy_16bit_ldr_literal (gdbarch
, insn1
, regs
, dsc
);
7336 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7337 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldr/str", dsc
);
7340 if (op_bit_10_11
< 2) /* Generate PC-relative address */
7341 err
= thumb_decode_pc_relative_16bit (gdbarch
, insn1
, regs
, dsc
);
7342 else /* Generate SP-relative address */
7343 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "sp-relative", dsc
);
7345 case 11: /* Misc 16-bit instructions */
7347 switch (bits (insn1
, 8, 11))
7349 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7350 err
= thumb_copy_cbnz_cbz (gdbarch
, insn1
, regs
, dsc
);
7352 case 12: case 13: /* POP */
7353 if (bit (insn1
, 8)) /* PC is in register list. */
7354 err
= thumb_copy_pop_pc_16bit (gdbarch
, insn1
, regs
, dsc
);
7356 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "pop", dsc
);
7358 case 15: /* If-Then, and hints */
7359 if (bits (insn1
, 0, 3))
7360 /* If-Then makes up to four following instructions conditional.
7361 IT instruction itself is not conditional, so handle it as a
7362 common unmodified instruction. */
7363 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "If-Then",
7366 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "hints", dsc
);
7369 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "misc", dsc
);
7374 if (op_bit_10_11
< 2) /* Store multiple registers */
7375 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "stm", dsc
);
7376 else /* Load multiple registers */
7377 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldm", dsc
);
7379 case 13: /* Conditional branch and supervisor call */
7380 if (bits (insn1
, 9, 11) != 7) /* conditional branch */
7381 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7383 err
= thumb_copy_svc (gdbarch
, insn1
, regs
, dsc
);
7385 case 14: /* Unconditional branch */
7386 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7393 internal_error (__FILE__
, __LINE__
,
7394 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7398 decode_thumb_32bit_ld_mem_hints (struct gdbarch
*gdbarch
,
7399 uint16_t insn1
, uint16_t insn2
,
7400 struct regcache
*regs
,
7401 arm_displaced_step_copy_insn_closure
*dsc
)
7403 int rt
= bits (insn2
, 12, 15);
7404 int rn
= bits (insn1
, 0, 3);
7405 int op1
= bits (insn1
, 7, 8);
7407 switch (bits (insn1
, 5, 6))
7409 case 0: /* Load byte and memory hints */
7410 if (rt
== 0xf) /* PLD/PLI */
7413 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7414 return thumb2_copy_preload (gdbarch
, insn1
, insn2
, regs
, dsc
);
7416 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7421 if (rn
== 0xf) /* LDRB/LDRSB (literal) */
7422 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7425 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7426 "ldrb{reg, immediate}/ldrbt",
7431 case 1: /* Load halfword and memory hints. */
7432 if (rt
== 0xf) /* PLD{W} and Unalloc memory hint. */
7433 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7434 "pld/unalloc memhint", dsc
);
7438 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7441 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7445 case 2: /* Load word */
7447 int insn2_bit_8_11
= bits (insn2
, 8, 11);
7450 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
, 4);
7451 else if (op1
== 0x1) /* Encoding T3 */
7452 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
, dsc
,
7454 else /* op1 == 0x0 */
7456 if (insn2_bit_8_11
== 0xc || (insn2_bit_8_11
& 0x9) == 0x9)
7457 /* LDR (immediate) */
7458 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7459 dsc
, bit (insn2
, 8), 1);
7460 else if (insn2_bit_8_11
== 0xe) /* LDRT */
7461 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7464 /* LDR (register) */
7465 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7471 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
7478 thumb_process_displaced_32bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7479 uint16_t insn2
, struct regcache
*regs
,
7480 arm_displaced_step_copy_insn_closure
*dsc
)
7483 unsigned short op
= bit (insn2
, 15);
7484 unsigned int op1
= bits (insn1
, 11, 12);
7490 switch (bits (insn1
, 9, 10))
7495 /* Load/store {dual, exclusive}, table branch. */
7496 if (bits (insn1
, 7, 8) == 1 && bits (insn1
, 4, 5) == 1
7497 && bits (insn2
, 5, 7) == 0)
7498 err
= thumb2_copy_table_branch (gdbarch
, insn1
, insn2
, regs
,
7501 /* PC is not allowed to use in load/store {dual, exclusive}
7503 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7504 "load/store dual/ex", dsc
);
7506 else /* load/store multiple */
7508 switch (bits (insn1
, 7, 8))
7510 case 0: case 3: /* SRS, RFE */
7511 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7514 case 1: case 2: /* LDM/STM/PUSH/POP */
7515 err
= thumb2_copy_block_xfer (gdbarch
, insn1
, insn2
, regs
, dsc
);
7522 /* Data-processing (shift register). */
7523 err
= thumb2_decode_dp_shift_reg (gdbarch
, insn1
, insn2
, regs
,
7526 default: /* Coprocessor instructions. */
7527 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7532 case 2: /* op1 = 2 */
7533 if (op
) /* Branch and misc control. */
7535 if (bit (insn2
, 14) /* BLX/BL */
7536 || bit (insn2
, 12) /* Unconditional branch */
7537 || (bits (insn1
, 7, 9) != 0x7)) /* Conditional branch */
7538 err
= thumb2_copy_b_bl_blx (gdbarch
, insn1
, insn2
, regs
, dsc
);
7540 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7545 if (bit (insn1
, 9)) /* Data processing (plain binary imm). */
7547 int dp_op
= bits (insn1
, 4, 8);
7548 int rn
= bits (insn1
, 0, 3);
7549 if ((dp_op
== 0 || dp_op
== 0xa) && rn
== 0xf)
7550 err
= thumb_copy_pc_relative_32bit (gdbarch
, insn1
, insn2
,
7553 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7556 else /* Data processing (modified immediate) */
7557 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7561 case 3: /* op1 = 3 */
7562 switch (bits (insn1
, 9, 10))
7566 err
= decode_thumb_32bit_ld_mem_hints (gdbarch
, insn1
, insn2
,
7568 else /* NEON Load/Store and Store single data item */
7569 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7570 "neon elt/struct load/store",
7573 case 1: /* op1 = 3, bits (9, 10) == 1 */
7574 switch (bits (insn1
, 7, 8))
7576 case 0: case 1: /* Data processing (register) */
7577 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7580 case 2: /* Multiply and absolute difference */
7581 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7582 "mul/mua/diff", dsc
);
7584 case 3: /* Long multiply and divide */
7585 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7590 default: /* Coprocessor instructions */
7591 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7600 internal_error (__FILE__
, __LINE__
,
7601 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7606 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7607 struct regcache
*regs
,
7608 arm_displaced_step_copy_insn_closure
*dsc
)
7610 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7612 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
7614 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7615 insn1
, (unsigned long) from
);
7618 dsc
->insn_size
= thumb_insn_size (insn1
);
7619 if (thumb_insn_size (insn1
) == 4)
7622 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
7623 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
7626 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
7630 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7631 CORE_ADDR to
, struct regcache
*regs
,
7632 arm_displaced_step_copy_insn_closure
*dsc
)
7635 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7638 /* Most displaced instructions use a 1-instruction scratch space, so set this
7639 here and override below if/when necessary. */
7641 dsc
->insn_addr
= from
;
7642 dsc
->scratch_base
= to
;
7643 dsc
->cleanup
= NULL
;
7644 dsc
->wrote_to_pc
= 0;
7646 if (!displaced_in_arm_mode (regs
))
7647 return thumb_process_displaced_insn (gdbarch
, from
, regs
, dsc
);
7651 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
7652 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7653 (unsigned long) insn
, (unsigned long) from
);
7655 if ((insn
& 0xf0000000) == 0xf0000000)
7656 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
7657 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
7659 case 0x0: case 0x1: case 0x2: case 0x3:
7660 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
7663 case 0x4: case 0x5: case 0x6:
7664 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
7668 err
= arm_decode_media (gdbarch
, insn
, dsc
);
7671 case 0x8: case 0x9: case 0xa: case 0xb:
7672 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
7675 case 0xc: case 0xd: case 0xe: case 0xf:
7676 err
= arm_decode_svc_copro (gdbarch
, insn
, regs
, dsc
);
7681 internal_error (__FILE__
, __LINE__
,
7682 _("arm_process_displaced_insn: Instruction decode error"));
7685 /* Actually set up the scratch space for a displaced instruction. */
7688 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7690 arm_displaced_step_copy_insn_closure
*dsc
)
7692 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
7693 unsigned int i
, len
, offset
;
7694 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7695 int size
= dsc
->is_thumb
? 2 : 4;
7696 const gdb_byte
*bkp_insn
;
7699 /* Poke modified instruction(s). */
7700 for (i
= 0; i
< dsc
->numinsns
; i
++)
7703 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7704 dsc
->modinsn
[i
], (unsigned long) to
+ offset
);
7706 displaced_debug_printf ("writing insn %.4x at %.8lx",
7707 (unsigned short) dsc
->modinsn
[i
],
7708 (unsigned long) to
+ offset
);
7710 write_memory_unsigned_integer (to
+ offset
, size
,
7711 byte_order_for_code
,
7716 /* Choose the correct breakpoint instruction. */
7719 bkp_insn
= tdep
->thumb_breakpoint
;
7720 len
= tdep
->thumb_breakpoint_size
;
7724 bkp_insn
= tdep
->arm_breakpoint
;
7725 len
= tdep
->arm_breakpoint_size
;
7728 /* Put breakpoint afterwards. */
7729 write_memory (to
+ offset
, bkp_insn
, len
);
7731 displaced_debug_printf ("copy %s->%s", paddress (gdbarch
, from
),
7732 paddress (gdbarch
, to
));
7735 /* Entry point for cleaning things up after a displaced instruction has been
7739 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
7740 struct displaced_step_copy_insn_closure
*dsc_
,
7741 CORE_ADDR from
, CORE_ADDR to
,
7742 struct regcache
*regs
)
7744 arm_displaced_step_copy_insn_closure
*dsc
7745 = (arm_displaced_step_copy_insn_closure
*) dsc_
;
7748 dsc
->cleanup (gdbarch
, regs
, dsc
);
7750 if (!dsc
->wrote_to_pc
)
7751 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
7752 dsc
->insn_addr
+ dsc
->insn_size
);
7756 #include "bfd-in2.h"
7757 #include "libcoff.h"
7760 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
7762 gdb_disassembler
*di
7763 = static_cast<gdb_disassembler
*>(info
->application_data
);
7764 struct gdbarch
*gdbarch
= di
->arch ();
7766 if (arm_pc_is_thumb (gdbarch
, memaddr
))
7768 static asymbol
*asym
;
7769 static combined_entry_type ce
;
7770 static struct coff_symbol_struct csym
;
7771 static struct bfd fake_bfd
;
7772 static bfd_target fake_target
;
7774 if (csym
.native
== NULL
)
7776 /* Create a fake symbol vector containing a Thumb symbol.
7777 This is solely so that the code in print_insn_little_arm()
7778 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7779 the presence of a Thumb symbol and switch to decoding
7780 Thumb instructions. */
7782 fake_target
.flavour
= bfd_target_coff_flavour
;
7783 fake_bfd
.xvec
= &fake_target
;
7784 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
7786 csym
.symbol
.the_bfd
= &fake_bfd
;
7787 csym
.symbol
.name
= "fake";
7788 asym
= (asymbol
*) & csym
;
7791 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
7792 info
->symbols
= &asym
;
7795 info
->symbols
= NULL
;
7797 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7798 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7799 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7800 the assert on the mismatch of info->mach and
7801 bfd_get_mach (current_program_space->exec_bfd ()) in
7802 default_print_insn. */
7803 if (current_program_space
->exec_bfd () != NULL
7804 && (current_program_space
->exec_bfd ()->arch_info
7805 == gdbarch_bfd_arch_info (gdbarch
)))
7806 info
->flags
|= USER_SPECIFIED_MACHINE_TYPE
;
7808 return default_print_insn (memaddr
, info
);
7811 /* The following define instruction sequences that will cause ARM
7812 cpu's to take an undefined instruction trap. These are used to
7813 signal a breakpoint to GDB.
7815 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7816 modes. A different instruction is required for each mode. The ARM
7817 cpu's can also be big or little endian. Thus four different
7818 instructions are needed to support all cases.
7820 Note: ARMv4 defines several new instructions that will take the
7821 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7822 not in fact add the new instructions. The new undefined
7823 instructions in ARMv4 are all instructions that had no defined
7824 behaviour in earlier chips. There is no guarantee that they will
7825 raise an exception, but may be treated as NOP's. In practice, it
7826 may only safe to rely on instructions matching:
7828 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7829 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7830 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7832 Even this may only true if the condition predicate is true. The
7833 following use a condition predicate of ALWAYS so it is always TRUE.
7835 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7836 and NetBSD all use a software interrupt rather than an undefined
7837 instruction to force a trap. This can be handled by by the
7838 abi-specific code during establishment of the gdbarch vector. */
/* Undefined-instruction byte sequences used as software breakpoints, per
   instruction-set mode and endianness (see the long comment above).  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
/* Thumb BKPT 0xbe is identical in both byte orders.  */
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint byte sequences installed into the tdep at gdbarch
   initialization; ABI-specific code may override them.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7850 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7853 arm_breakpoint_kind_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
)
7855 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
7856 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7858 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
7860 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
7862 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7863 check whether we are replacing a 32-bit instruction. */
7864 if (tdep
->thumb2_breakpoint
!= NULL
)
7868 if (target_read_memory (*pcptr
, buf
, 2) == 0)
7870 unsigned short inst1
;
7872 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
7873 if (thumb_insn_size (inst1
) == 4)
7874 return ARM_BP_KIND_THUMB2
;
7878 return ARM_BP_KIND_THUMB
;
7881 return ARM_BP_KIND_ARM
;
7885 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7887 static const gdb_byte
*
7888 arm_sw_breakpoint_from_kind (struct gdbarch
*gdbarch
, int kind
, int *size
)
7890 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
7894 case ARM_BP_KIND_ARM
:
7895 *size
= tdep
->arm_breakpoint_size
;
7896 return tdep
->arm_breakpoint
;
7897 case ARM_BP_KIND_THUMB
:
7898 *size
= tdep
->thumb_breakpoint_size
;
7899 return tdep
->thumb_breakpoint
;
7900 case ARM_BP_KIND_THUMB2
:
7901 *size
= tdep
->thumb2_breakpoint_size
;
7902 return tdep
->thumb2_breakpoint
;
7904 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7908 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7911 arm_breakpoint_kind_from_current_state (struct gdbarch
*gdbarch
,
7912 struct regcache
*regcache
,
7917 /* Check the memory pointed by PC is readable. */
7918 if (target_read_memory (regcache_read_pc (regcache
), buf
, 4) == 0)
7920 struct arm_get_next_pcs next_pcs_ctx
;
7922 arm_get_next_pcs_ctor (&next_pcs_ctx
,
7923 &arm_get_next_pcs_ops
,
7924 gdbarch_byte_order (gdbarch
),
7925 gdbarch_byte_order_for_code (gdbarch
),
7929 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
7931 /* If MEMADDR is the next instruction of current pc, do the
7932 software single step computation, and get the thumb mode by
7933 the destination address. */
7934 for (CORE_ADDR pc
: next_pcs
)
7936 if (UNMAKE_THUMB_ADDR (pc
) == *pcptr
)
7938 if (IS_THUMB_ADDR (pc
))
7940 *pcptr
= MAKE_THUMB_ADDR (*pcptr
);
7941 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7944 return ARM_BP_KIND_ARM
;
7949 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7952 /* Extract from an array REGBUF containing the (raw) register state a
7953 function return value of type TYPE, and copy that, in virtual
7954 format, into VALBUF. */
7957 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
7960 struct gdbarch
*gdbarch
= regs
->arch ();
7961 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7962 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
7964 if (TYPE_CODE_FLT
== type
->code ())
7966 switch (tdep
->fp_model
)
7970 /* The value is in register F0 in internal format. We need to
7971 extract the raw value and then convert it to the desired
7973 bfd_byte tmpbuf
[ARM_FP_REGISTER_SIZE
];
7975 regs
->cooked_read (ARM_F0_REGNUM
, tmpbuf
);
7976 target_float_convert (tmpbuf
, arm_ext_type (gdbarch
),
7981 case ARM_FLOAT_SOFT_FPA
:
7982 case ARM_FLOAT_SOFT_VFP
:
7983 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7984 not using the VFP ABI code. */
7986 regs
->cooked_read (ARM_A1_REGNUM
, valbuf
);
7987 if (TYPE_LENGTH (type
) > 4)
7988 regs
->cooked_read (ARM_A1_REGNUM
+ 1,
7989 valbuf
+ ARM_INT_REGISTER_SIZE
);
7993 internal_error (__FILE__
, __LINE__
,
7994 _("arm_extract_return_value: "
7995 "Floating point model not supported"));
7999 else if (type
->code () == TYPE_CODE_INT
8000 || type
->code () == TYPE_CODE_CHAR
8001 || type
->code () == TYPE_CODE_BOOL
8002 || type
->code () == TYPE_CODE_PTR
8003 || TYPE_IS_REFERENCE (type
)
8004 || type
->code () == TYPE_CODE_ENUM
8005 || is_fixed_point_type (type
))
8007 /* If the type is a plain integer, then the access is
8008 straight-forward. Otherwise we have to play around a bit
8010 int len
= TYPE_LENGTH (type
);
8011 int regno
= ARM_A1_REGNUM
;
8016 /* By using store_unsigned_integer we avoid having to do
8017 anything special for small big-endian values. */
8018 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
8019 store_unsigned_integer (valbuf
,
8020 (len
> ARM_INT_REGISTER_SIZE
8021 ? ARM_INT_REGISTER_SIZE
: len
),
8023 len
-= ARM_INT_REGISTER_SIZE
;
8024 valbuf
+= ARM_INT_REGISTER_SIZE
;
8029 /* For a structure or union the behaviour is as if the value had
8030 been stored to word-aligned memory and then loaded into
8031 registers with 32-bit load instruction(s). */
8032 int len
= TYPE_LENGTH (type
);
8033 int regno
= ARM_A1_REGNUM
;
8034 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8038 regs
->cooked_read (regno
++, tmpbuf
);
8039 memcpy (valbuf
, tmpbuf
,
8040 len
> ARM_INT_REGISTER_SIZE
? ARM_INT_REGISTER_SIZE
: len
);
8041 len
-= ARM_INT_REGISTER_SIZE
;
8042 valbuf
+= ARM_INT_REGISTER_SIZE
;
8048 /* Will a function return an aggregate type in memory or in a
8049 register? Return 0 if an aggregate type can be returned in a
8050 register, 1 if it must be returned in memory. */
8053 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
8055 enum type_code code
;
8057 type
= check_typedef (type
);
8059 /* Simple, non-aggregate types (ie not including vectors and
8060 complex) are always returned in a register (or registers). */
8061 code
= type
->code ();
8062 if (TYPE_CODE_STRUCT
!= code
&& TYPE_CODE_UNION
!= code
8063 && TYPE_CODE_ARRAY
!= code
&& TYPE_CODE_COMPLEX
!= code
)
8066 if (TYPE_CODE_ARRAY
== code
&& type
->is_vector ())
8068 /* Vector values should be returned using ARM registers if they
8069 are not over 16 bytes. */
8070 return (TYPE_LENGTH (type
) > 16);
8073 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8074 if (tdep
->arm_abi
!= ARM_ABI_APCS
)
8076 /* The AAPCS says all aggregates not larger than a word are returned
8078 if (TYPE_LENGTH (type
) <= ARM_INT_REGISTER_SIZE
)
8087 /* All aggregate types that won't fit in a register must be returned
8089 if (TYPE_LENGTH (type
) > ARM_INT_REGISTER_SIZE
)
8092 /* In the ARM ABI, "integer" like aggregate types are returned in
8093 registers. For an aggregate type to be integer like, its size
8094 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8095 offset of each addressable subfield must be zero. Note that bit
8096 fields are not addressable, and all addressable subfields of
8097 unions always start at offset zero.
8099 This function is based on the behaviour of GCC 2.95.1.
8100 See: gcc/arm.c: arm_return_in_memory() for details.
8102 Note: All versions of GCC before GCC 2.95.2 do not set up the
8103 parameters correctly for a function returning the following
8104 structure: struct { float f;}; This should be returned in memory,
8105 not a register. Richard Earnshaw sent me a patch, but I do not
8106 know of any way to detect if a function like the above has been
8107 compiled with the correct calling convention. */
8109 /* Assume all other aggregate types can be returned in a register.
8110 Run a check for structures, unions and arrays. */
8113 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
8116 /* Need to check if this struct/union is "integer" like. For
8117 this to be true, its size must be less than or equal to
8118 ARM_INT_REGISTER_SIZE and the offset of each addressable
8119 subfield must be zero. Note that bit fields are not
8120 addressable, and unions always start at offset zero. If any
8121 of the subfields is a floating point type, the struct/union
8122 cannot be an integer type. */
8124 /* For each field in the object, check:
8125 1) Is it FP? --> yes, nRc = 1;
8126 2) Is it addressable (bitpos != 0) and
8127 not packed (bitsize == 0)?
8131 for (i
= 0; i
< type
->num_fields (); i
++)
8133 enum type_code field_type_code
;
8136 = check_typedef (type
->field (i
).type ())->code ();
8138 /* Is it a floating point type field? */
8139 if (field_type_code
== TYPE_CODE_FLT
)
8145 /* If bitpos != 0, then we have to care about it. */
8146 if (type
->field (i
).loc_bitpos () != 0)
8148 /* Bitfields are not addressable. If the field bitsize is
8149 zero, then the field is not packed. Hence it cannot be
8150 a bitfield or any other packed type. */
8151 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
8164 /* Write into appropriate registers a function return value of type
8165 TYPE, given in virtual format. */
8168 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
8169 const gdb_byte
*valbuf
)
8171 struct gdbarch
*gdbarch
= regs
->arch ();
8172 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8174 if (type
->code () == TYPE_CODE_FLT
)
8176 gdb_byte buf
[ARM_FP_REGISTER_SIZE
];
8177 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8179 switch (tdep
->fp_model
)
8183 target_float_convert (valbuf
, type
, buf
, arm_ext_type (gdbarch
));
8184 regs
->cooked_write (ARM_F0_REGNUM
, buf
);
8187 case ARM_FLOAT_SOFT_FPA
:
8188 case ARM_FLOAT_SOFT_VFP
:
8189 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8190 not using the VFP ABI code. */
8192 regs
->cooked_write (ARM_A1_REGNUM
, valbuf
);
8193 if (TYPE_LENGTH (type
) > 4)
8194 regs
->cooked_write (ARM_A1_REGNUM
+ 1,
8195 valbuf
+ ARM_INT_REGISTER_SIZE
);
8199 internal_error (__FILE__
, __LINE__
,
8200 _("arm_store_return_value: Floating "
8201 "point model not supported"));
8205 else if (type
->code () == TYPE_CODE_INT
8206 || type
->code () == TYPE_CODE_CHAR
8207 || type
->code () == TYPE_CODE_BOOL
8208 || type
->code () == TYPE_CODE_PTR
8209 || TYPE_IS_REFERENCE (type
)
8210 || type
->code () == TYPE_CODE_ENUM
)
8212 if (TYPE_LENGTH (type
) <= 4)
8214 /* Values of one word or less are zero/sign-extended and
8216 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8217 LONGEST val
= unpack_long (type
, valbuf
);
8219 store_signed_integer (tmpbuf
, ARM_INT_REGISTER_SIZE
, byte_order
, val
);
8220 regs
->cooked_write (ARM_A1_REGNUM
, tmpbuf
);
8224 /* Integral values greater than one word are stored in consecutive
8225 registers starting with r0. This will always be a multiple of
8226 the regiser size. */
8227 int len
= TYPE_LENGTH (type
);
8228 int regno
= ARM_A1_REGNUM
;
8232 regs
->cooked_write (regno
++, valbuf
);
8233 len
-= ARM_INT_REGISTER_SIZE
;
8234 valbuf
+= ARM_INT_REGISTER_SIZE
;
8240 /* For a structure or union the behaviour is as if the value had
8241 been stored to word-aligned memory and then loaded into
8242 registers with 32-bit load instruction(s). */
8243 int len
= TYPE_LENGTH (type
);
8244 int regno
= ARM_A1_REGNUM
;
8245 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8249 memcpy (tmpbuf
, valbuf
,
8250 len
> ARM_INT_REGISTER_SIZE
? ARM_INT_REGISTER_SIZE
: len
);
8251 regs
->cooked_write (regno
++, tmpbuf
);
8252 len
-= ARM_INT_REGISTER_SIZE
;
8253 valbuf
+= ARM_INT_REGISTER_SIZE
;
8259 /* Handle function return values. */
8261 static enum return_value_convention
8262 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
8263 struct type
*valtype
, struct regcache
*regcache
,
8264 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
8266 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8267 struct type
*func_type
= function
? value_type (function
) : NULL
;
8268 enum arm_vfp_cprc_base_type vfp_base_type
;
8271 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
8272 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
8274 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
8275 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
8277 for (i
= 0; i
< vfp_base_count
; i
++)
8279 if (reg_char
== 'q')
8282 arm_neon_quad_write (gdbarch
, regcache
, i
,
8283 writebuf
+ i
* unit_length
);
8286 arm_neon_quad_read (gdbarch
, regcache
, i
,
8287 readbuf
+ i
* unit_length
);
8294 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
8295 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8298 regcache
->cooked_write (regnum
, writebuf
+ i
* unit_length
);
8300 regcache
->cooked_read (regnum
, readbuf
+ i
* unit_length
);
8303 return RETURN_VALUE_REGISTER_CONVENTION
;
8306 if (valtype
->code () == TYPE_CODE_STRUCT
8307 || valtype
->code () == TYPE_CODE_UNION
8308 || valtype
->code () == TYPE_CODE_ARRAY
)
8310 if (tdep
->struct_return
== pcc_struct_return
8311 || arm_return_in_memory (gdbarch
, valtype
))
8312 return RETURN_VALUE_STRUCT_CONVENTION
;
8314 else if (valtype
->code () == TYPE_CODE_COMPLEX
)
8316 if (arm_return_in_memory (gdbarch
, valtype
))
8317 return RETURN_VALUE_STRUCT_CONVENTION
;
8321 arm_store_return_value (valtype
, regcache
, writebuf
);
8324 arm_extract_return_value (valtype
, regcache
, readbuf
);
8326 return RETURN_VALUE_REGISTER_CONVENTION
;
8331 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
8333 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
8334 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8335 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8337 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
8339 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
8341 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
8342 ARM_INT_REGISTER_SIZE
))
8345 *pc
= extract_unsigned_integer (buf
, ARM_INT_REGISTER_SIZE
, byte_order
);
8348 /* A call to cmse secure entry function "foo" at "a" is modified by
8355 b) bl yyyy <__acle_se_foo>
8357 section .gnu.sgstubs:
8359 yyyy: sg // secure gateway
8360 b.w xxxx <__acle_se_foo> // original_branch_dest
8365 When the control at "b", the pc contains "yyyy" (sg address) which is a
8366 trampoline and does not exist in source code. This function returns the
8367 target pc "xxxx". For more details please refer to section 5.4
8368 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8369 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8370 document on www.developer.arm.com. */
8373 arm_skip_cmse_entry (CORE_ADDR pc
, const char *name
, struct objfile
*objfile
)
8375 int target_len
= strlen (name
) + strlen ("__acle_se_") + 1;
8376 char *target_name
= (char *) alloca (target_len
);
8377 xsnprintf (target_name
, target_len
, "%s%s", "__acle_se_", name
);
8379 struct bound_minimal_symbol minsym
8380 = lookup_minimal_symbol (target_name
, NULL
, objfile
);
8382 if (minsym
.minsym
!= nullptr)
8383 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8388 /* Return true when SEC points to ".gnu.sgstubs" section. */
8391 arm_is_sgstubs_section (struct obj_section
*sec
)
8393 return (sec
!= nullptr
8394 && sec
->the_bfd_section
!= nullptr
8395 && sec
->the_bfd_section
->name
!= nullptr
8396 && streq (sec
->the_bfd_section
->name
, ".gnu.sgstubs"));
8399 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8400 return the target PC. Otherwise return 0. */
8403 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
8407 CORE_ADDR start_addr
;
8409 /* Find the starting address and name of the function containing the PC. */
8410 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
8412 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8414 start_addr
= arm_skip_bx_reg (frame
, pc
);
8415 if (start_addr
!= 0)
8421 /* If PC is in a Thumb call or return stub, return the address of the
8422 target PC, which is in a register. The thunk functions are called
8423 _call_via_xx, where x is the register name. The possible names
8424 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8425 functions, named __ARM_call_via_r[0-7]. */
8426 if (startswith (name
, "_call_via_")
8427 || startswith (name
, "__ARM_call_via_"))
8429 /* Use the name suffix to determine which register contains the
8431 static const char *table
[15] =
8432 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8433 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8436 int offset
= strlen (name
) - 2;
8438 for (regno
= 0; regno
<= 14; regno
++)
8439 if (strcmp (&name
[offset
], table
[regno
]) == 0)
8440 return get_frame_register_unsigned (frame
, regno
);
8443 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8444 non-interworking calls to foo. We could decode the stubs
8445 to find the target but it's easier to use the symbol table. */
8446 namelen
= strlen (name
);
8447 if (name
[0] == '_' && name
[1] == '_'
8448 && ((namelen
> 2 + strlen ("_from_thumb")
8449 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
8450 || (namelen
> 2 + strlen ("_from_arm")
8451 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
8454 int target_len
= namelen
- 2;
8455 struct bound_minimal_symbol minsym
;
8456 struct objfile
*objfile
;
8457 struct obj_section
*sec
;
8459 if (name
[namelen
- 1] == 'b')
8460 target_len
-= strlen ("_from_thumb");
8462 target_len
-= strlen ("_from_arm");
8464 target_name
= (char *) alloca (target_len
+ 1);
8465 memcpy (target_name
, name
+ 2, target_len
);
8466 target_name
[target_len
] = '\0';
8468 sec
= find_pc_section (pc
);
8469 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
8470 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
8471 if (minsym
.minsym
!= NULL
)
8472 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8477 struct obj_section
*section
= find_pc_section (pc
);
8479 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8480 if (arm_is_sgstubs_section (section
))
8481 return arm_skip_cmse_entry (pc
, name
, section
->objfile
);
8483 return 0; /* not a stub */
8487 arm_update_current_architecture (void)
8489 /* If the current architecture is not ARM, we have nothing to do. */
8490 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
8493 /* Update the architecture. */
8495 if (!gdbarch_update_p (info
))
8496 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
8500 set_fp_model_sfunc (const char *args
, int from_tty
,
8501 struct cmd_list_element
*c
)
8505 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
8506 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
8508 arm_fp_model
= (enum arm_float_model
) fp_model
;
8512 if (fp_model
== ARM_FLOAT_LAST
)
8513 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
8516 arm_update_current_architecture ();
8520 show_fp_model (struct ui_file
*file
, int from_tty
,
8521 struct cmd_list_element
*c
, const char *value
)
8523 arm_gdbarch_tdep
*tdep
8524 = (arm_gdbarch_tdep
*) gdbarch_tdep (target_gdbarch ());
8526 if (arm_fp_model
== ARM_FLOAT_AUTO
8527 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8528 fprintf_filtered (file
, _("\
8529 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8530 fp_model_strings
[tdep
->fp_model
]);
8532 fprintf_filtered (file
, _("\
8533 The current ARM floating point model is \"%s\".\n"),
8534 fp_model_strings
[arm_fp_model
]);
8538 arm_set_abi (const char *args
, int from_tty
,
8539 struct cmd_list_element
*c
)
8543 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
8544 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
8546 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
8550 if (arm_abi
== ARM_ABI_LAST
)
8551 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
8554 arm_update_current_architecture ();
8558 arm_show_abi (struct ui_file
*file
, int from_tty
,
8559 struct cmd_list_element
*c
, const char *value
)
8561 arm_gdbarch_tdep
*tdep
8562 = (arm_gdbarch_tdep
*) gdbarch_tdep (target_gdbarch ());
8564 if (arm_abi_global
== ARM_ABI_AUTO
8565 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8566 fprintf_filtered (file
, _("\
8567 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8568 arm_abi_strings
[tdep
->arm_abi
]);
8570 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
8575 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
8576 struct cmd_list_element
*c
, const char *value
)
8578 fprintf_filtered (file
,
8579 _("The current execution mode assumed "
8580 "(when symbols are unavailable) is \"%s\".\n"),
8581 arm_fallback_mode_string
);
8585 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
8586 struct cmd_list_element
*c
, const char *value
)
8588 fprintf_filtered (file
,
8589 _("The current execution mode assumed "
8590 "(even when symbols are available) is \"%s\".\n"),
8591 arm_force_mode_string
);
8594 /* If the user changes the register disassembly style used for info
8595 register and other commands, we have to also switch the style used
8596 in opcodes for disassembly output. This function is run in the "set
8597 arm disassembly" command, and does that. */
8600 set_disassembly_style_sfunc (const char *args
, int from_tty
,
8601 struct cmd_list_element
*c
)
8603 /* Convert the short style name into the long style name (eg, reg-names-*)
8604 before calling the generic set_disassembler_options() function. */
8605 std::string long_name
= std::string ("reg-names-") + disassembly_style
;
8606 set_disassembler_options (&long_name
[0]);
8610 show_disassembly_style_sfunc (struct ui_file
*file
, int from_tty
,
8611 struct cmd_list_element
*c
, const char *value
)
8613 struct gdbarch
*gdbarch
= get_current_arch ();
8614 char *options
= get_disassembler_options (gdbarch
);
8615 const char *style
= "";
8619 FOR_EACH_DISASSEMBLER_OPTION (opt
, options
)
8620 if (startswith (opt
, "reg-names-"))
8622 style
= &opt
[strlen ("reg-names-")];
8623 len
= strcspn (style
, ",");
8626 fprintf_filtered (file
, "The disassembly style is \"%.*s\".\n", len
, style
);
8629 /* Return the ARM register name corresponding to register I. */
8631 arm_register_name (struct gdbarch
*gdbarch
, int i
)
8633 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8635 if (is_s_pseudo (gdbarch
, i
))
8637 static const char *const s_pseudo_names
[] = {
8638 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8639 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8640 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8641 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8644 return s_pseudo_names
[i
- tdep
->s_pseudo_base
];
8647 if (is_q_pseudo (gdbarch
, i
))
8649 static const char *const q_pseudo_names
[] = {
8650 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8651 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8654 return q_pseudo_names
[i
- tdep
->q_pseudo_base
];
8657 if (is_mve_pseudo (gdbarch
, i
))
8660 if (i
>= ARRAY_SIZE (arm_register_names
))
8661 /* These registers are only supported on targets which supply
8662 an XML description. */
8665 /* Non-pseudo registers. */
8666 return arm_register_names
[i
];
8669 /* Test whether the coff symbol specific value corresponds to a Thumb
8673 coff_sym_is_thumb (int val
)
8675 return (val
== C_THUMBEXT
8676 || val
== C_THUMBSTAT
8677 || val
== C_THUMBEXTFUNC
8678 || val
== C_THUMBSTATFUNC
8679 || val
== C_THUMBLABEL
);
8682 /* arm_coff_make_msymbol_special()
8683 arm_elf_make_msymbol_special()
8685 These functions test whether the COFF or ELF symbol corresponds to
8686 an address in thumb code, and set a "special" bit in a minimal
8687 symbol to indicate that it does. */
8690 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
8692 elf_symbol_type
*elfsym
= (elf_symbol_type
*) sym
;
8694 if (ARM_GET_SYM_BRANCH_TYPE (elfsym
->internal_elf_sym
.st_target_internal
)
8695 == ST_BRANCH_TO_THUMB
)
8696 MSYMBOL_SET_SPECIAL (msym
);
8700 arm_coff_make_msymbol_special(int val
, struct minimal_symbol
*msym
)
8702 if (coff_sym_is_thumb (val
))
8703 MSYMBOL_SET_SPECIAL (msym
);
8707 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
8710 const char *name
= bfd_asymbol_name (sym
);
8711 struct arm_per_bfd
*data
;
8712 struct arm_mapping_symbol new_map_sym
;
8714 gdb_assert (name
[0] == '$');
8715 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
8718 data
= arm_bfd_data_key
.get (objfile
->obfd
);
8720 data
= arm_bfd_data_key
.emplace (objfile
->obfd
,
8721 objfile
->obfd
->section_count
);
8722 arm_mapping_symbol_vec
&map
8723 = data
->section_maps
[bfd_asymbol_section (sym
)->index
];
8725 new_map_sym
.value
= sym
->value
;
8726 new_map_sym
.type
= name
[1];
8728 /* Insert at the end, the vector will be sorted on first use. */
8729 map
.push_back (new_map_sym
);
8733 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
8735 struct gdbarch
*gdbarch
= regcache
->arch ();
8736 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
8738 /* If necessary, set the T bit. */
8741 ULONGEST val
, t_bit
;
8742 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
8743 t_bit
= arm_psr_thumb_bit (gdbarch
);
8744 if (arm_pc_is_thumb (gdbarch
, pc
))
8745 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8748 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8753 /* Read the contents of a NEON quad register, by reading from two
8754 double registers. This is used to implement the quad pseudo
8755 registers, and for argument passing in case the quad registers are
8756 missing; vectors are passed in quad registers when using the VFP
8757 ABI, even if a NEON unit is not present. REGNUM is the index of
8758 the quad register, in [0, 15]. */
8760 static enum register_status
8761 arm_neon_quad_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8762 int regnum
, gdb_byte
*buf
)
8765 gdb_byte reg_buf
[8];
8766 int offset
, double_regnum
;
8767 enum register_status status
;
8769 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8770 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8773 /* d0 is always the least significant half of q0. */
8774 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8779 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8780 if (status
!= REG_VALID
)
8782 memcpy (buf
+ offset
, reg_buf
, 8);
8784 offset
= 8 - offset
;
8785 status
= regcache
->raw_read (double_regnum
+ 1, reg_buf
);
8786 if (status
!= REG_VALID
)
8788 memcpy (buf
+ offset
, reg_buf
, 8);
8793 /* Read the contents of the MVE pseudo register REGNUM and store it
8796 static enum register_status
8797 arm_mve_pseudo_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8798 int regnum
, gdb_byte
*buf
)
8800 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8802 /* P0 is the first 16 bits of VPR. */
8803 return regcache
->raw_read_part (tdep
->mve_vpr_regnum
, 0, 2, buf
);
8806 static enum register_status
8807 arm_pseudo_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8808 int regnum
, gdb_byte
*buf
)
8810 const int num_regs
= gdbarch_num_regs (gdbarch
);
8812 gdb_byte reg_buf
[8];
8813 int offset
, double_regnum
;
8814 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8816 gdb_assert (regnum
>= num_regs
);
8818 if (is_q_pseudo (gdbarch
, regnum
))
8820 /* Quad-precision register. */
8821 return arm_neon_quad_read (gdbarch
, regcache
,
8822 regnum
- tdep
->q_pseudo_base
, buf
);
8824 else if (is_mve_pseudo (gdbarch
, regnum
))
8825 return arm_mve_pseudo_read (gdbarch
, regcache
, regnum
, buf
);
8828 enum register_status status
;
8830 regnum
-= tdep
->s_pseudo_base
;
8831 /* Single-precision register. */
8832 gdb_assert (regnum
< 32);
8834 /* s0 is always the least significant half of d0. */
8835 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8836 offset
= (regnum
& 1) ? 0 : 4;
8838 offset
= (regnum
& 1) ? 4 : 0;
8840 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8841 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8844 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8845 if (status
== REG_VALID
)
8846 memcpy (buf
, reg_buf
+ offset
, 4);
8851 /* Store the contents of BUF to a NEON quad register, by writing to
8852 two double registers. This is used to implement the quad pseudo
8853 registers, and for argument passing in case the quad registers are
8854 missing; vectors are passed in quad registers when using the VFP
8855 ABI, even if a NEON unit is not present. REGNUM is the index
8856 of the quad register, in [0, 15]. */
8859 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8860 int regnum
, const gdb_byte
*buf
)
8863 int offset
, double_regnum
;
8865 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8866 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8869 /* d0 is always the least significant half of q0. */
8870 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8875 regcache
->raw_write (double_regnum
, buf
+ offset
);
8876 offset
= 8 - offset
;
8877 regcache
->raw_write (double_regnum
+ 1, buf
+ offset
);
8880 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
8883 arm_mve_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8884 int regnum
, const gdb_byte
*buf
)
8886 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8888 /* P0 is the first 16 bits of VPR. */
8889 regcache
->raw_write_part (tdep
->mve_vpr_regnum
, 0, 2, buf
);
8893 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8894 int regnum
, const gdb_byte
*buf
)
8896 const int num_regs
= gdbarch_num_regs (gdbarch
);
8898 gdb_byte reg_buf
[8];
8899 int offset
, double_regnum
;
8900 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8902 gdb_assert (regnum
>= num_regs
);
8904 if (is_q_pseudo (gdbarch
, regnum
))
8906 /* Quad-precision register. */
8907 arm_neon_quad_write (gdbarch
, regcache
,
8908 regnum
- tdep
->q_pseudo_base
, buf
);
8910 else if (is_mve_pseudo (gdbarch
, regnum
))
8911 arm_mve_pseudo_write (gdbarch
, regcache
, regnum
, buf
);
8914 regnum
-= tdep
->s_pseudo_base
;
8915 /* Single-precision register. */
8916 gdb_assert (regnum
< 32);
8918 /* s0 is always the least significant half of d0. */
8919 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8920 offset
= (regnum
& 1) ? 0 : 4;
8922 offset
= (regnum
& 1) ? 4 : 0;
8924 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8925 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8928 regcache
->raw_read (double_regnum
, reg_buf
);
8929 memcpy (reg_buf
+ offset
, buf
, 4);
8930 regcache
->raw_write (double_regnum
, reg_buf
);
8934 static struct value
*
8935 value_of_arm_user_reg (struct frame_info
*frame
, const void *baton
)
8937 const int *reg_p
= (const int *) baton
;
8938 return value_of_register (*reg_p
, frame
);
8941 static enum gdb_osabi
8942 arm_elf_osabi_sniffer (bfd
*abfd
)
8944 unsigned int elfosabi
;
8945 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
8947 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
8949 if (elfosabi
== ELFOSABI_ARM
)
8950 /* GNU tools use this value. Check note sections in this case,
8953 for (asection
*sect
: gdb_bfd_sections (abfd
))
8954 generic_elf_osabi_sniff_abi_tag_sections (abfd
, sect
, &osabi
);
8957 /* Anything else will be handled by the generic ELF sniffer. */
8962 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
8963 struct reggroup
*group
)
8965 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8966 this, FPS register belongs to save_regroup, restore_reggroup, and
8967 all_reggroup, of course. */
8968 if (regnum
== ARM_FPS_REGNUM
)
8969 return (group
== float_reggroup
8970 || group
== save_reggroup
8971 || group
== restore_reggroup
8972 || group
== all_reggroup
);
8974 return default_register_reggroup_p (gdbarch
, regnum
, group
);
8977 /* For backward-compatibility we allow two 'g' packet lengths with
8978 the remote protocol depending on whether FPA registers are
8979 supplied. M-profile targets do not have FPA registers, but some
8980 stubs already exist in the wild which use a 'g' packet which
8981 supplies them albeit with dummy values. The packet format which
8982 includes FPA registers should be considered deprecated for
8983 M-profile targets. */
8986 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
8988 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
8992 const target_desc
*tdesc
;
8994 /* If we know from the executable this is an M-profile target,
8995 cater for remote targets whose register set layout is the
8996 same as the FPA layout. */
8997 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA
);
8998 register_remote_g_packet_guess (gdbarch
,
8999 ARM_CORE_REGS_SIZE
+ ARM_FP_REGS_SIZE
,
9002 /* The regular M-profile layout. */
9003 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE
);
9004 register_remote_g_packet_guess (gdbarch
, ARM_CORE_REGS_SIZE
,
9007 /* M-profile plus M4F VFP. */
9008 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_VFP_D16
);
9009 register_remote_g_packet_guess (gdbarch
,
9010 ARM_CORE_REGS_SIZE
+ ARM_VFP2_REGS_SIZE
,
9012 /* M-profile plus MVE. */
9013 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_MVE
);
9014 register_remote_g_packet_guess (gdbarch
, ARM_CORE_REGS_SIZE
9015 + ARM_VFP2_REGS_SIZE
9016 + ARM_INT_REGISTER_SIZE
, tdesc
);
9019 /* Otherwise we don't have a useful guess. */
9022 /* Implement the code_of_frame_writable gdbarch method. */
9025 arm_code_of_frame_writable (struct gdbarch
*gdbarch
, struct frame_info
*frame
)
9027 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
9029 if (tdep
->is_m
&& get_frame_type (frame
) == SIGTRAMP_FRAME
)
9031 /* M-profile exception frames return to some magic PCs, where
9032 isn't writable at all. */
9039 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
9040 to be postfixed by a version (eg armv7hl). */
9043 arm_gnu_triplet_regexp (struct gdbarch
*gdbarch
)
9045 if (strcmp (gdbarch_bfd_arch_info (gdbarch
)->arch_name
, "arm") == 0)
9046 return "arm(v[^- ]*)?";
9047 return gdbarch_bfd_arch_info (gdbarch
)->arch_name
;
9050 /* Initialize the current architecture based on INFO. If possible,
9051 re-use an architecture from ARCHES, which is a list of
9052 architectures already created during this debugging session.
9054 Called e.g. at program startup, when reading a core file, and when
9055 reading a binary file. */
9057 static struct gdbarch
*
9058 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9060 struct gdbarch
*gdbarch
;
9061 struct gdbarch_list
*best_arch
;
9062 enum arm_abi_kind arm_abi
= arm_abi_global
;
9063 enum arm_float_model fp_model
= arm_fp_model
;
9064 tdesc_arch_data_up tdesc_data
;
9067 int vfp_register_count
= 0;
9068 bool have_s_pseudos
= false, have_q_pseudos
= false;
9069 bool have_wmmx_registers
= false;
9070 bool have_neon
= false;
9071 bool have_fpa_registers
= true;
9072 const struct target_desc
*tdesc
= info
.target_desc
;
9073 bool have_vfp
= false;
9074 bool have_mve
= false;
9075 int mve_vpr_regnum
= -1;
9076 int register_count
= ARM_NUM_REGS
;
9078 /* If we have an object to base this architecture on, try to determine
9081 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9083 int ei_osabi
, e_flags
;
9085 switch (bfd_get_flavour (info
.abfd
))
9087 case bfd_target_coff_flavour
:
9088 /* Assume it's an old APCS-style ABI. */
9090 arm_abi
= ARM_ABI_APCS
;
9093 case bfd_target_elf_flavour
:
9094 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9095 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9097 if (ei_osabi
== ELFOSABI_ARM
)
9099 /* GNU tools used to use this value, but do not for EABI
9100 objects. There's nowhere to tag an EABI version
9101 anyway, so assume APCS. */
9102 arm_abi
= ARM_ABI_APCS
;
9104 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
9106 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9110 case EF_ARM_EABI_UNKNOWN
:
9111 /* Assume GNU tools. */
9112 arm_abi
= ARM_ABI_APCS
;
9115 case EF_ARM_EABI_VER4
:
9116 case EF_ARM_EABI_VER5
:
9117 arm_abi
= ARM_ABI_AAPCS
;
9118 /* EABI binaries default to VFP float ordering.
9119 They may also contain build attributes that can
9120 be used to identify if the VFP argument-passing
9122 if (fp_model
== ARM_FLOAT_AUTO
)
9125 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
9129 case AEABI_VFP_args_base
:
9130 /* "The user intended FP parameter/result
9131 passing to conform to AAPCS, base
9133 fp_model
= ARM_FLOAT_SOFT_VFP
;
9135 case AEABI_VFP_args_vfp
:
9136 /* "The user intended FP parameter/result
9137 passing to conform to AAPCS, VFP
9139 fp_model
= ARM_FLOAT_VFP
;
9141 case AEABI_VFP_args_toolchain
:
9142 /* "The user intended FP parameter/result
9143 passing to conform to tool chain-specific
9144 conventions" - we don't know any such
9145 conventions, so leave it as "auto". */
9147 case AEABI_VFP_args_compatible
:
9148 /* "Code is compatible with both the base
9149 and VFP variants; the user did not permit
9150 non-variadic functions to pass FP
9151 parameters/results" - leave it as
9155 /* Attribute value not mentioned in the
9156 November 2012 ABI, so leave it as
9161 fp_model
= ARM_FLOAT_SOFT_VFP
;
9167 /* Leave it as "auto". */
9168 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
9173 /* Detect M-profile programs. This only works if the
9174 executable file includes build attributes; GCC does
9175 copy them to the executable, but e.g. RealView does
9178 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
9181 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
9182 Tag_CPU_arch_profile
);
9184 /* GCC specifies the profile for v6-M; RealView only
9185 specifies the profile for architectures starting with
9186 V7 (as opposed to architectures with a tag
9187 numerically greater than TAG_CPU_ARCH_V7). */
9188 if (!tdesc_has_registers (tdesc
)
9189 && (attr_arch
== TAG_CPU_ARCH_V6_M
9190 || attr_arch
== TAG_CPU_ARCH_V6S_M
9191 || attr_arch
== TAG_CPU_ARCH_V8_1M_MAIN
9192 || attr_profile
== 'M'))
9197 if (fp_model
== ARM_FLOAT_AUTO
)
9199 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
9202 /* Leave it as "auto". Strictly speaking this case
9203 means FPA, but almost nobody uses that now, and
9204 many toolchains fail to set the appropriate bits
9205 for the floating-point model they use. */
9207 case EF_ARM_SOFT_FLOAT
:
9208 fp_model
= ARM_FLOAT_SOFT_FPA
;
9210 case EF_ARM_VFP_FLOAT
:
9211 fp_model
= ARM_FLOAT_VFP
;
9213 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
9214 fp_model
= ARM_FLOAT_SOFT_VFP
;
9219 if (e_flags
& EF_ARM_BE8
)
9220 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
9225 /* Leave it as "auto". */
9230 /* Check any target description for validity. */
9231 if (tdesc_has_registers (tdesc
))
9233 /* For most registers we require GDB's default names; but also allow
9234 the numeric names for sp / lr / pc, as a convenience. */
9235 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
9236 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
9237 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
9239 const struct tdesc_feature
*feature
;
9242 feature
= tdesc_find_feature (tdesc
,
9243 "org.gnu.gdb.arm.core");
9244 if (feature
== NULL
)
9246 feature
= tdesc_find_feature (tdesc
,
9247 "org.gnu.gdb.arm.m-profile");
9248 if (feature
== NULL
)
9254 tdesc_data
= tdesc_data_alloc ();
9257 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
9258 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9259 arm_register_names
[i
]);
9260 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
.get (),
9263 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
.get (),
9266 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
.get (),
9270 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9271 ARM_PS_REGNUM
, "xpsr");
9273 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9274 ARM_PS_REGNUM
, "cpsr");
9279 feature
= tdesc_find_feature (tdesc
,
9280 "org.gnu.gdb.arm.fpa");
9281 if (feature
!= NULL
)
9284 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
9285 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9286 arm_register_names
[i
]);
9291 have_fpa_registers
= false;
9293 feature
= tdesc_find_feature (tdesc
,
9294 "org.gnu.gdb.xscale.iwmmxt");
9295 if (feature
!= NULL
)
9297 static const char *const iwmmxt_names
[] = {
9298 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9299 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9300 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9301 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9305 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
9307 &= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9308 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9310 /* Check for the control registers, but do not fail if they
9312 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
9313 tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9314 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9316 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
9318 &= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9319 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9324 have_wmmx_registers
= true;
9327 /* If we have a VFP unit, check whether the single precision registers
9328 are present. If not, then we will synthesize them as pseudo
9330 feature
= tdesc_find_feature (tdesc
,
9331 "org.gnu.gdb.arm.vfp");
9332 if (feature
!= NULL
)
9334 static const char *const vfp_double_names
[] = {
9335 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9336 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9337 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9338 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9341 /* Require the double precision registers. There must be either
9344 for (i
= 0; i
< 32; i
++)
9346 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9348 vfp_double_names
[i
]);
9352 if (!valid_p
&& i
== 16)
9355 /* Also require FPSCR. */
9356 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9357 ARM_FPSCR_REGNUM
, "fpscr");
9363 if (tdesc_unnumbered_register (feature
, "s0") == 0)
9364 have_s_pseudos
= true;
9366 vfp_register_count
= i
;
9368 /* If we have VFP, also check for NEON. The architecture allows
9369 NEON without VFP (integer vector operations only), but GDB
9370 does not support that. */
9371 feature
= tdesc_find_feature (tdesc
,
9372 "org.gnu.gdb.arm.neon");
9373 if (feature
!= NULL
)
9375 /* NEON requires 32 double-precision registers. */
9379 /* If there are quad registers defined by the stub, use
9380 their type; otherwise (normally) provide them with
9381 the default type. */
9382 if (tdesc_unnumbered_register (feature
, "q0") == 0)
9383 have_q_pseudos
= true;
9387 /* Check for MVE after all the checks for GPR's, VFP and Neon.
9388 MVE (Helium) is an M-profile extension. */
9391 /* Do we have the MVE feature? */
9392 feature
= tdesc_find_feature (tdesc
,"org.gnu.gdb.arm.m-profile-mve");
9394 if (feature
!= nullptr)
9396 /* If we have MVE, we must always have the VPR register. */
9397 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9398 register_count
, "vpr");
9401 warning (_("MVE feature is missing required register vpr."));
9406 mve_vpr_regnum
= register_count
;
9409 /* We can't have Q pseudo registers available here, as that
9410 would mean we have NEON features, and that is only available
9411 on A and R profiles. */
9412 gdb_assert (!have_q_pseudos
);
9414 /* Given we have a M-profile target description, if MVE is
9415 enabled and there are VFP registers, we should have Q
9416 pseudo registers (Q0 ~ Q7). */
9418 have_q_pseudos
= true;
9423 /* If there is already a candidate, use it. */
9424 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
9426 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
9428 arm_gdbarch_tdep
*tdep
9429 = (arm_gdbarch_tdep
*) gdbarch_tdep (best_arch
->gdbarch
);
9431 if (arm_abi
!= ARM_ABI_AUTO
&& arm_abi
!= tdep
->arm_abi
)
9434 if (fp_model
!= ARM_FLOAT_AUTO
&& fp_model
!= tdep
->fp_model
)
9437 /* There are various other properties in tdep that we do not
9438 need to check here: those derived from a target description,
9439 since gdbarches with a different target description are
9440 automatically disqualified. */
9442 /* Do check is_m, though, since it might come from the binary. */
9443 if (is_m
!= tdep
->is_m
)
9446 /* Found a match. */
9450 if (best_arch
!= NULL
)
9451 return best_arch
->gdbarch
;
9453 arm_gdbarch_tdep
*tdep
= new arm_gdbarch_tdep
;
9454 gdbarch
= gdbarch_alloc (&info
, tdep
);
9456 /* Record additional information about the architecture we are defining.
9457 These are gdbarch discriminators, like the OSABI. */
9458 tdep
->arm_abi
= arm_abi
;
9459 tdep
->fp_model
= fp_model
;
9461 tdep
->have_fpa_registers
= have_fpa_registers
;
9462 tdep
->have_wmmx_registers
= have_wmmx_registers
;
9463 gdb_assert (vfp_register_count
== 0
9464 || vfp_register_count
== 16
9465 || vfp_register_count
== 32);
9466 tdep
->vfp_register_count
= vfp_register_count
;
9467 tdep
->have_s_pseudos
= have_s_pseudos
;
9468 tdep
->have_q_pseudos
= have_q_pseudos
;
9469 tdep
->have_neon
= have_neon
;
9471 /* Adjust the MVE feature settings. */
9474 tdep
->have_mve
= true;
9475 tdep
->mve_vpr_regnum
= mve_vpr_regnum
;
9478 arm_register_g_packet_guesses (gdbarch
);
9481 switch (info
.byte_order_for_code
)
9483 case BFD_ENDIAN_BIG
:
9484 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
9485 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
9486 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
9487 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
9491 case BFD_ENDIAN_LITTLE
:
9492 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
9493 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
9494 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
9495 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
9500 internal_error (__FILE__
, __LINE__
,
9501 _("arm_gdbarch_init: bad byte order for float format"));
9504 /* On ARM targets char defaults to unsigned. */
9505 set_gdbarch_char_signed (gdbarch
, 0);
9507 /* wchar_t is unsigned under the AAPCS. */
9508 if (tdep
->arm_abi
== ARM_ABI_AAPCS
)
9509 set_gdbarch_wchar_signed (gdbarch
, 0);
9511 set_gdbarch_wchar_signed (gdbarch
, 1);
9513 /* Compute type alignment. */
9514 set_gdbarch_type_align (gdbarch
, arm_type_align
);
9516 /* Note: for displaced stepping, this includes the breakpoint, and one word
9517 of additional scratch space. This setting isn't used for anything beside
9518 displaced stepping at present. */
9519 set_gdbarch_max_insn_length (gdbarch
, 4 * ARM_DISPLACED_MODIFIED_INSNS
);
9521 /* This should be low enough for everything. */
9522 tdep
->lowest_pc
= 0x20;
9523 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
9525 /* The default, for both APCS and AAPCS, is to return small
9526 structures in registers. */
9527 tdep
->struct_return
= reg_struct_return
;
9529 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
9530 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
9533 set_gdbarch_code_of_frame_writable (gdbarch
, arm_code_of_frame_writable
);
9535 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
9537 frame_base_set_default (gdbarch
, &arm_normal_base
);
9539 /* Address manipulation. */
9540 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
9542 /* Advance PC across function entry code. */
9543 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
9545 /* Detect whether PC is at a point where the stack has been destroyed. */
9546 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
9548 /* Skip trampolines. */
9549 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
9551 /* The stack grows downward. */
9552 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
9554 /* Breakpoint manipulation. */
9555 set_gdbarch_breakpoint_kind_from_pc (gdbarch
, arm_breakpoint_kind_from_pc
);
9556 set_gdbarch_sw_breakpoint_from_kind (gdbarch
, arm_sw_breakpoint_from_kind
);
9557 set_gdbarch_breakpoint_kind_from_current_state (gdbarch
,
9558 arm_breakpoint_kind_from_current_state
);
9560 /* Information about registers, etc. */
9561 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
9562 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
9563 set_gdbarch_num_regs (gdbarch
, register_count
);
9564 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9565 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
9567 /* This "info float" is FPA-specific. Use the generic version if we
9569 if (tdep
->have_fpa_registers
)
9570 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
9572 /* Internal <-> external register number maps. */
9573 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
9574 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
9576 set_gdbarch_register_name (gdbarch
, arm_register_name
);
9578 /* Returning results. */
9579 set_gdbarch_return_value (gdbarch
, arm_return_value
);
9582 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
9584 /* Minsymbol frobbing. */
9585 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
9586 set_gdbarch_coff_make_msymbol_special (gdbarch
,
9587 arm_coff_make_msymbol_special
);
9588 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
9590 /* Thumb-2 IT block support. */
9591 set_gdbarch_adjust_breakpoint_address (gdbarch
,
9592 arm_adjust_breakpoint_address
);
9594 /* Virtual tables. */
9595 set_gdbarch_vbit_in_delta (gdbarch
, 1);
9597 /* Hook in the ABI-specific overrides, if they have been registered. */
9598 gdbarch_init_osabi (info
, gdbarch
);
9600 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
9602 /* Add some default predicates. */
9604 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
9605 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
9606 dwarf2_append_unwinders (gdbarch
);
9607 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
9608 frame_unwind_append_unwinder (gdbarch
, &arm_epilogue_frame_unwind
);
9609 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
9611 /* Now we have tuned the configuration, set a few final things,
9612 based on what the OS ABI has told us. */
9614 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9615 binaries are always marked. */
9616 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
9617 tdep
->arm_abi
= ARM_ABI_APCS
;
9619 /* Watchpoints are not steppable. */
9620 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
9622 /* We used to default to FPA for generic ARM, but almost nobody
9623 uses that now, and we now provide a way for the user to force
9624 the model. So default to the most useful variant. */
9625 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
9626 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
9628 if (tdep
->jb_pc
>= 0)
9629 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
9631 /* Floating point sizes and format. */
9632 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
9633 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
9635 set_gdbarch_double_format
9636 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9637 set_gdbarch_long_double_format
9638 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9642 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
9643 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
9646 if (tdesc_data
!= nullptr)
9648 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
9650 tdesc_use_registers (gdbarch
, tdesc
, std::move (tdesc_data
));
9651 register_count
= gdbarch_num_regs (gdbarch
);
9653 /* Override tdesc_register_type to adjust the types of VFP
9654 registers for NEON. */
9655 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9658 /* Initialize the pseudo register data. */
9659 int num_pseudos
= 0;
9660 if (tdep
->have_s_pseudos
)
9662 /* VFP single precision pseudo registers (S0~S31). */
9663 tdep
->s_pseudo_base
= register_count
;
9664 tdep
->s_pseudo_count
= 32;
9665 num_pseudos
+= tdep
->s_pseudo_count
;
9667 if (tdep
->have_q_pseudos
)
9669 /* NEON quad precision pseudo registers (Q0~Q15). */
9670 tdep
->q_pseudo_base
= register_count
+ num_pseudos
;
9673 tdep
->q_pseudo_count
= 16;
9675 tdep
->q_pseudo_count
= ARM_MVE_NUM_Q_REGS
;
9677 num_pseudos
+= tdep
->q_pseudo_count
;
9681 /* Do we have any MVE pseudo registers? */
9684 tdep
->mve_pseudo_base
= register_count
+ num_pseudos
;
9685 tdep
->mve_pseudo_count
= 1;
9686 num_pseudos
+= tdep
->mve_pseudo_count
;
9689 /* Set some pseudo register hooks, if we have pseudo registers. */
9690 if (tdep
->have_s_pseudos
|| have_mve
)
9692 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
9693 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
9694 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
9697 /* Add standard register aliases. We add aliases even for those
9698 names which are used by the current architecture - it's simpler,
9699 and does no harm, since nothing ever lists user registers. */
9700 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
9701 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
9702 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
9704 set_gdbarch_disassembler_options (gdbarch
, &arm_disassembler_options
);
9705 set_gdbarch_valid_disassembler_options (gdbarch
, disassembler_options_arm ());
9707 set_gdbarch_gnu_triplet_regexp (gdbarch
, arm_gnu_triplet_regexp
);
/* Dump the ARM-specific gdbarch parameters (arm_gdbarch_tdep) to FILE,
   one labelled line per field, for "maint print architecture".
   NOTE(review): this chunk is extraction-mangled (statements split
   across physical lines, stale line numbers embedded); code bytes are
   preserved exactly, only comments are added.  */
9713 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
/* Fetch the ARM-specific tdep attached to GDBARCH.  */
9715 arm_gdbarch_tdep
*tdep
= (arm_gdbarch_tdep
*) gdbarch_tdep (gdbarch
);
/* Print each tdep field: FP model, register-set availability
   (FPA / WMMX / VFP), pseudo-register bases and counts (S, Q, MVE),
   the MVE VPR register number, and the lowest valid PC.  */
9720 fprintf_filtered (file
, _("arm_dump_tdep: fp_model = %i\n"),
9721 (int) tdep
->fp_model
);
9722 fprintf_filtered (file
, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9723 (int) tdep
->have_fpa_registers
);
9724 fprintf_filtered (file
, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9725 (int) tdep
->have_wmmx_registers
);
9726 fprintf_filtered (file
, _("arm_dump_tdep: vfp_register_count = %i\n"),
9727 (int) tdep
->vfp_register_count
);
9728 fprintf_filtered (file
, _("arm_dump_tdep: have_s_pseudos = %s\n"),
9729 tdep
->have_s_pseudos
? "true" : "false");
9730 fprintf_filtered (file
, _("arm_dump_tdep: s_pseudo_base = %i\n"),
9731 (int) tdep
->s_pseudo_base
);
9732 fprintf_filtered (file
, _("arm_dump_tdep: s_pseudo_count = %i\n"),
9733 (int) tdep
->s_pseudo_count
);
9734 fprintf_filtered (file
, _("arm_dump_tdep: have_q_pseudos = %s\n"),
9735 tdep
->have_q_pseudos
? "true" : "false");
9736 fprintf_filtered (file
, _("arm_dump_tdep: q_pseudo_base = %i\n"),
9737 (int) tdep
->q_pseudo_base
);
9738 fprintf_filtered (file
, _("arm_dump_tdep: q_pseudo_count = %i\n"),
9739 (int) tdep
->q_pseudo_count
);
9740 fprintf_filtered (file
, _("arm_dump_tdep: have_neon = %i\n"),
9741 (int) tdep
->have_neon
);
9742 fprintf_filtered (file
, _("arm_dump_tdep: have_mve = %s\n"),
9743 tdep
->have_mve
? "yes" : "no");
9744 fprintf_filtered (file
, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
9745 tdep
->mve_vpr_regnum
);
9746 fprintf_filtered (file
, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
9747 tdep
->mve_pseudo_base
);
9748 fprintf_filtered (file
, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
9749 tdep
->mve_pseudo_count
);
9750 fprintf_filtered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9751 (unsigned long) tdep
->lowest_pc
);
9757 static void arm_record_test (void);
9758 static void arm_analyze_prologue_test ();
9762 void _initialize_arm_tdep ();
/* Module initializer: registers the ARM architecture with GDB's
   gdbarch framework and creates all "set/show arm ..." commands,
   the OS-ABI sniffer, the new-objfile observer, and the selftests.
   NOTE(review): this chunk is extraction-mangled (statements split
   across physical lines, stale line numbers embedded, some lines
   dropped); apart from the fixes flagged below, code bytes are kept
   exactly as found.  */
9764 _initialize_arm_tdep ()
/* Scratch buffer used to accumulate the "set arm disassembler"
   per-style help text via snprintf below.  */
9768 char regdesc
[1024], *rdptr
= regdesc
;
9769 size_t rest
= sizeof (regdesc
);
/* Hook the ARM bfd architecture up to arm_gdbarch_init and
   arm_dump_tdep.  */
9771 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
9773 /* Add ourselves to objfile event chain. */
9774 gdb::observers::new_objfile
.attach (arm_exidx_new_objfile
, "arm-tdep");
9776 /* Register an ELF OS ABI sniffer for ARM binaries. */
9777 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
9778 bfd_target_elf_flavour
,
9779 arm_elf_osabi_sniffer
);
9781 /* Add root prefix command for all "set arm"/"show arm" commands. */
9782 add_setshow_prefix_cmd ("arm", no_class
,
9783 _("Various ARM-specific commands."),
9784 _("Various ARM-specific commands."),
9785 &setarmcmdlist
, &showarmcmdlist
,
9786 &setlist
, &showlist
);
/* Count the disassembler's "reg-names-*" options; each becomes a valid
   value of "set arm disassembler".  */
9788 arm_disassembler_options
= xstrdup ("reg-names-std");
9789 const disasm_options_t
*disasm_options
9790 = &disassembler_options_arm ()->options
;
9791 int num_disassembly_styles
= 0;
9792 for (i
= 0; disasm_options
->name
[i
] != NULL
; i
++)
9793 if (startswith (disasm_options
->name
[i
], "reg-names-"))
9794 num_disassembly_styles
++;
9796 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9797 valid_disassembly_styles
= XNEWVEC (const char *,
9798 num_disassembly_styles
+ 1);
9799 for (i
= j
= 0; disasm_options
->name
[i
] != NULL
; i
++)
9800 if (startswith (disasm_options
->name
[i
], "reg-names-"))
9802 size_t offset
= strlen ("reg-names-");
9803 const char *style
= disasm_options
->name
[i
];
9804 valid_disassembly_styles
[j
++] = &style
[offset
];
/* Append "<style> - <description>" to the help-text scratch buffer.  */
9805 length
= snprintf (rdptr
, rest
, "%s - %s\n", &style
[offset
],
9806 disasm_options
->description
[i
]);
9810 /* Mark the end of valid options. */
9811 valid_disassembly_styles
[num_disassembly_styles
] = NULL
;
9813 /* Create the help text. */
9814 std::string helptext
= string_printf ("%s%s%s",
9815 _("The valid values are:\n"),
9817 _("The default is \"std\"."));
9819 add_setshow_enum_cmd("disassembler", no_class
,
9820 valid_disassembly_styles
, &disassembly_style
,
9821 _("Set the disassembly style."),
9822 _("Show the disassembly style."),
9824 set_disassembly_style_sfunc
,
9825 show_disassembly_style_sfunc
,
9826 &setarmcmdlist
, &showarmcmdlist
);
9828 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
9829 _("Set usage of ARM 32-bit mode."),
9830 _("Show usage of ARM 32-bit mode."),
9831 _("When off, a 26-bit PC will be used."),
9833 NULL
, /* FIXME: i18n: Usage of ARM 32-bit mode is %s.  */
9835 &setarmcmdlist
, &showarmcmdlist
);
9837 /* Add a command to allow the user to force the FPU model. */
/* FIX(review): "¤t_fp_model" was mojibake ("&curren" swallowed as an
   HTML entity) -- restored to "&current_fp_model".  Also fixed the
   user-visible typo "typefrom" -> "type from" in the help string, and
   re-terminated two comments above/below whose closing "*" "/" lines
   were dropped by the extraction.  */
9838 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, &current_fp_model
,
9839 _("Set the floating point type."),
9840 _("Show the floating point type."),
9841 _("auto - Determine the FP type from the OS-ABI.\n\
9842 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9843 fpa - FPA co-processor (GCC compiled).\n\
9844 softvfp - Software FP with pure-endian doubles.\n\
9845 vfp - VFP co-processor."),
9846 set_fp_model_sfunc
, show_fp_model
,
9847 &setarmcmdlist
, &showarmcmdlist
);
9849 /* Add a command to allow the user to force the ABI. */
9850 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
9853 NULL
, arm_set_abi
, arm_show_abi
,
9854 &setarmcmdlist
, &showarmcmdlist
);
9856 /* Add two commands to allow the user to force the assumed
   execution mode.  */
9858 add_setshow_enum_cmd ("fallback-mode", class_support
,
9859 arm_mode_strings
, &arm_fallback_mode_string
,
9860 _("Set the mode assumed when symbols are unavailable."),
9861 _("Show the mode assumed when symbols are unavailable."),
9862 NULL
, NULL
, arm_show_fallback_mode
,
9863 &setarmcmdlist
, &showarmcmdlist
);
9864 add_setshow_enum_cmd ("force-mode", class_support
,
9865 arm_mode_strings
, &arm_force_mode_string
,
9866 _("Set the mode assumed even when symbols are available."),
9867 _("Show the mode assumed even when symbols are available."),
9868 NULL
, NULL
, arm_show_force_mode
,
9869 &setarmcmdlist
, &showarmcmdlist
);
9871 /* Debugging flag. */
9872 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
9873 _("Set ARM debugging."),
9874 _("Show ARM debugging."),
9875 _("When on, arm-specific debugging is enabled."),
9877 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
9878 &setdebuglist
, &showdebuglist
);
/* Register the ARM selftests (the surrounding GDB_SELF_TEST guard
   lines are not visible in this chunk).  */
9881 selftests::register_test ("arm-record", selftests::arm_record_test
);
9882 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test
);
9887 /* ARM-reversible process record data structures. */
9889 #define ARM_INSN_SIZE_BYTES 4
9890 #define THUMB_INSN_SIZE_BYTES 2
9891 #define THUMB2_INSN_SIZE_BYTES 4
9894 /* Position of the bit within a 32-bit ARM instruction
9895 that defines whether the instruction is a load or store. */
9896 #define INSN_S_L_BIT_NUM 20
9898 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9901 unsigned int reg_len = LENGTH; \
9904 REGS = XNEWVEC (uint32_t, reg_len); \
9905 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9910 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9913 unsigned int mem_len = LENGTH; \
9916 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9917 memcpy(&MEMS->len, &RECORD_BUF[0], \
9918 sizeof(struct arm_mem_r) * LENGTH); \
9923 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9924 #define INSN_RECORDED(ARM_RECORD) \
9925 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9927 /* ARM memory record structure. */
9930 uint32_t len
; /* Record length. */
9931 uint32_t addr
; /* Memory address. */
9934 /* ARM instruction record contains opcode of current insn
9935 and execution state (before entry to decode_insn()),
9936 contains list of to-be-modified registers and
9937 memory blocks (on return from decode_insn()). */
/* One decoded instruction for ARM reversible debugging (process
   record): the raw insn and its decode state on entry to
   decode_insn(), plus the lists of registers and memory blocks the
   insn will modify, filled in on return.  */
9939 typedef struct insn_decode_record_t
/* Architecture of the inferior being recorded; assumed to be set by
   the caller before decoding -- TODO(review): confirm against
   decode_insn's callers, not visible in this chunk.  */
9941 struct gdbarch
*gdbarch
;
/* Register cache read while decoding -- e.g. arm_record_strx reads
   base-register values through this.  */
9942 struct regcache
*regcache
;
9943 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
9944 uint32_t arm_insn
; /* Should accommodate thumb. */
9945 uint32_t cond
; /* Condition code. */
9946 uint32_t opcode
; /* Insn opcode. */
9947 uint32_t decode
; /* Insn decode bits. */
9948 uint32_t mem_rec_count
; /* No of mem records. */
9949 uint32_t reg_rec_count
; /* No of reg records. */
9950 uint32_t *arm_regs
; /* Registers to be saved for this record. */
9951 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
9952 } insn_decode_record
;
9955 /* Checks ARM SBZ and SBO mandatory fields. */
9958 sbo_sbz (uint32_t insn
, uint32_t bit_num
, uint32_t len
, uint32_t sbo
)
9960 uint32_t ones
= bits (insn
, bit_num
- 1, (bit_num
-1) + (len
- 1));
9979 enum arm_record_result
9981 ARM_RECORD_SUCCESS
= 0,
9982 ARM_RECORD_FAILURE
= 1
9989 } arm_record_strx_t
;
/* Record the side effects of an ARM misc store (STRH or STRD,
   selected by STR_TYPE) for process record: writes (length, address)
   pairs into RECORD_BUF_MEM for the bytes the store will clobber
   (2 bytes for STRH; two 4-byte words for STRD), bumps
   ARM_INSN_R->mem_rec_count accordingly, and for the pre/post-indexed
   forms also records the written-back base register Rn in RECORD_BUF
   with reg_rec_count = 1.  Addressing modes are dispatched on
   bits 21-24 of the insn: immediate offset (opcodes 14/10), register
   offset (12/8), immediate pre/post-indexed (11/15/2/6), register
   pre/post-indexed (9/13/0/4).  When R15 is the base, 8 is added to
   the value read (current PC + 8).
   NOTE(review): this chunk is extraction-mangled; code bytes are
   preserved exactly, comments only added.  */
10000 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
10001 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
10004 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10005 ULONGEST u_regval
[2]= {0};
10007 uint32_t reg_src1
= 0, reg_src2
= 0;
10008 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
/* Cache the addressing-mode opcode (bits 21-24) and decode bits.  */
10010 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10011 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10013 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10015 /* 1) Handle misc store, immediate offset. */
10016 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10017 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10018 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10019 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
10021 if (ARM_PC_REGNUM
== reg_src1
)
10023 /* If R15 was used as Rn, hence current PC+8. */
10024 u_regval
[0] = u_regval
[0] + 8;
/* Reassemble the split 8-bit immediate: high nibble from bits 8-11,
   low nibble from bits 0-3.  */
10026 offset_8
= (immed_high
<< 4) | immed_low
;
10027 /* Calculate target store address. */
10028 if (14 == arm_insn_r
->opcode
)
10030 tgt_mem_addr
= u_regval
[0] + offset_8
;
10034 tgt_mem_addr
= u_regval
[0] - offset_8
;
10036 if (ARM_RECORD_STRH
== str_type
)
10038 record_buf_mem
[0] = 2;
10039 record_buf_mem
[1] = tgt_mem_addr
;
10040 arm_insn_r
->mem_rec_count
= 1;
10042 else if (ARM_RECORD_STRD
== str_type
)
10044 record_buf_mem
[0] = 4;
10045 record_buf_mem
[1] = tgt_mem_addr
;
10046 record_buf_mem
[2] = 4;
10047 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10048 arm_insn_r
->mem_rec_count
= 2;
10051 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
10053 /* 2) Store, register offset. */
/* Rm is in bits 0-3, the base Rn in bits 16-19; u_regval[0] holds
   Rm's value and u_regval[1] holds Rn's.  */
10055 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10057 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10058 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10059 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10060 if (15 == reg_src2
)
10062 /* If R15 was used as Rn, hence current PC+8. */
/* NOTE(review): the guard tests reg_src2 (Rn) but this adjusts
   u_regval[0], which holds Rm (read from reg_src1); it looks like it
   should adjust u_regval[1] -- confirm upstream before changing.  */
10063 u_regval
[0] = u_regval
[0] + 8;
10065 /* Calculate target store address, Rn +/- Rm, register offset. */
10066 if (12 == arm_insn_r
->opcode
)
10068 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10072 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10074 if (ARM_RECORD_STRH
== str_type
)
10076 record_buf_mem
[0] = 2;
10077 record_buf_mem
[1] = tgt_mem_addr
;
10078 arm_insn_r
->mem_rec_count
= 1;
10080 else if (ARM_RECORD_STRD
== str_type
)
10082 record_buf_mem
[0] = 4;
10083 record_buf_mem
[1] = tgt_mem_addr
;
10084 record_buf_mem
[2] = 4;
10085 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10086 arm_insn_r
->mem_rec_count
= 2;
10089 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10090 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10092 /* 3) Store, immediate pre-indexed. */
10093 /* 5) Store, immediate post-indexed. */
10094 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10095 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10096 offset_8
= (immed_high
<< 4) | immed_low
;
10097 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10098 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10099 /* Calculate target store address, Rn +/- Rm, register offset. */
10100 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10102 tgt_mem_addr
= u_regval
[0] + offset_8
;
10106 tgt_mem_addr
= u_regval
[0] - offset_8
;
10108 if (ARM_RECORD_STRH
== str_type
)
10110 record_buf_mem
[0] = 2;
10111 record_buf_mem
[1] = tgt_mem_addr
;
10112 arm_insn_r
->mem_rec_count
= 1;
10114 else if (ARM_RECORD_STRD
== str_type
)
10116 record_buf_mem
[0] = 4;
10117 record_buf_mem
[1] = tgt_mem_addr
;
10118 record_buf_mem
[2] = 4;
10119 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10120 arm_insn_r
->mem_rec_count
= 2;
10122 /* Record Rn also as it changes. */
10123 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10124 arm_insn_r
->reg_rec_count
= 1;
10126 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
10127 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10129 /* 4) Store, register pre-indexed. */
10130 /* 6) Store, register post -indexed. */
10131 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10132 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10133 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10134 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10135 /* Calculate target store address, Rn +/- Rm, register offset. */
10136 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10138 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10142 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10144 if (ARM_RECORD_STRH
== str_type
)
10146 record_buf_mem
[0] = 2;
10147 record_buf_mem
[1] = tgt_mem_addr
;
10148 arm_insn_r
->mem_rec_count
= 1;
10150 else if (ARM_RECORD_STRD
== str_type
)
10152 record_buf_mem
[0] = 4;
10153 record_buf_mem
[1] = tgt_mem_addr
;
10154 record_buf_mem
[2] = 4;
10155 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10156 arm_insn_r
->mem_rec_count
= 2;
10158 /* Record Rn also as it changes. */
10159 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10160 arm_insn_r
->reg_rec_count
= 1;
10165 /* Handling ARM extension space insns. */
10168 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
10170 int ret
= 0; /* Return value: -1:record failure ; 0:success */
10171 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
10172 uint32_t record_buf
[8], record_buf_mem
[8];
10173 uint32_t reg_src1
= 0;
10174 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10175 ULONGEST u_regval
= 0;
10177 gdb_assert (!INSN_RECORDED(arm_insn_r
));
10178 /* Handle unconditional insn extension space. */
10180 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
10181 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10182 if (arm_insn_r
->cond
)
10184 /* PLD has no affect on architectural state, it just affects
10186 if (5 == ((opcode1
& 0xE0) >> 5))
10189 record_buf
[0] = ARM_PS_REGNUM
;
10190 record_buf
[1] = ARM_LR_REGNUM
;
10191 arm_insn_r
->reg_rec_count
= 2;
10193 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10197 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10198 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
10201 /* Undefined instruction on ARM V5; need to handle if later
10202 versions define it. */
10205 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
10206 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10207 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
10209 /* Handle arithmetic insn extension space. */
10210 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
10211 && !INSN_RECORDED(arm_insn_r
))
10213 /* Handle MLA(S) and MUL(S). */
10214 if (in_inclusive_range (insn_op1
, 0U, 3U))
10216 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10217 record_buf
[1] = ARM_PS_REGNUM
;
10218 arm_insn_r
->reg_rec_count
= 2;
10220 else if (in_inclusive_range (insn_op1
, 4U, 15U))
10222 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10223 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10224 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10225 record_buf
[2] = ARM_PS_REGNUM
;
10226 arm_insn_r
->reg_rec_count
= 3;
10230 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
10231 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
10232 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
10234 /* Handle control insn extension space. */
10236 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
10237 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
10239 if (!bit (arm_insn_r
->arm_insn
,25))
10241 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
10243 if ((0 == insn_op1
) || (2 == insn_op1
))
10246 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10247 arm_insn_r
->reg_rec_count
= 1;
10249 else if (1 == insn_op1
)
10251 /* CSPR is going to be changed. */
10252 record_buf
[0] = ARM_PS_REGNUM
;
10253 arm_insn_r
->reg_rec_count
= 1;
10255 else if (3 == insn_op1
)
10257 /* SPSR is going to be changed. */
10258 /* We need to get SPSR value, which is yet to be done. */
10262 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
10267 record_buf
[0] = ARM_PS_REGNUM
;
10268 arm_insn_r
->reg_rec_count
= 1;
10270 else if (3 == insn_op1
)
10273 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10274 arm_insn_r
->reg_rec_count
= 1;
10277 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
10280 record_buf
[0] = ARM_PS_REGNUM
;
10281 record_buf
[1] = ARM_LR_REGNUM
;
10282 arm_insn_r
->reg_rec_count
= 2;
10284 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
10286 /* QADD, QSUB, QDADD, QDSUB */
10287 record_buf
[0] = ARM_PS_REGNUM
;
10288 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10289 arm_insn_r
->reg_rec_count
= 2;
10291 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
10294 record_buf
[0] = ARM_PS_REGNUM
;
10295 record_buf
[1] = ARM_LR_REGNUM
;
10296 arm_insn_r
->reg_rec_count
= 2;
10298 /* Save SPSR also;how? */
10301 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
10302 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
10303 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
10304 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
10307 if (0 == insn_op1
|| 1 == insn_op1
)
10309 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10310 /* We dont do optimization for SMULW<y> where we
10312 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10313 record_buf
[1] = ARM_PS_REGNUM
;
10314 arm_insn_r
->reg_rec_count
= 2;
10316 else if (2 == insn_op1
)
10319 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10320 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
10321 arm_insn_r
->reg_rec_count
= 2;
10323 else if (3 == insn_op1
)
10326 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10327 arm_insn_r
->reg_rec_count
= 1;
10333 /* MSR : immediate form. */
10336 /* CSPR is going to be changed. */
10337 record_buf
[0] = ARM_PS_REGNUM
;
10338 arm_insn_r
->reg_rec_count
= 1;
10340 else if (3 == insn_op1
)
10342 /* SPSR is going to be changed. */
10343 /* we need to get SPSR value, which is yet to be done */
10349 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10350 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
10351 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
10353 /* Handle load/store insn extension space. */
10355 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
10356 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
10357 && !INSN_RECORDED(arm_insn_r
))
10362 /* These insn, changes register and memory as well. */
10363 /* SWP or SWPB insn. */
10364 /* Get memory address given by Rn. */
10365 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10366 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
10367 /* SWP insn ?, swaps word. */
10368 if (8 == arm_insn_r
->opcode
)
10370 record_buf_mem
[0] = 4;
10374 /* SWPB insn, swaps only byte. */
10375 record_buf_mem
[0] = 1;
10377 record_buf_mem
[1] = u_regval
;
10378 arm_insn_r
->mem_rec_count
= 1;
10379 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10380 arm_insn_r
->reg_rec_count
= 1;
10382 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10385 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10388 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10391 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10392 record_buf
[1] = record_buf
[0] + 1;
10393 arm_insn_r
->reg_rec_count
= 2;
10395 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10398 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10401 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
10403 /* LDRH, LDRSB, LDRSH. */
10404 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10405 arm_insn_r
->reg_rec_count
= 1;
10410 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
10411 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
10412 && !INSN_RECORDED(arm_insn_r
))
10415 /* Handle coprocessor insn extension space. */
10418 /* To be done for ARMv5 and later; as of now we return -1. */
10422 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10423 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10428 /* Handling opcode 000 insns. */
10431 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
10433 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10434 uint32_t record_buf
[8], record_buf_mem
[8];
10435 ULONGEST u_regval
[2] = {0};
10437 uint32_t reg_src1
= 0;
10438 uint32_t opcode1
= 0;
10440 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10441 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10442 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10444 if (!((opcode1
& 0x19) == 0x10))
10446 /* Data-processing (register) and Data-processing (register-shifted
10448 /* Out of 11 shifter operands mode, all the insn modifies destination
10449 register, which is specified by 13-16 decode. */
10450 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10451 record_buf
[1] = ARM_PS_REGNUM
;
10452 arm_insn_r
->reg_rec_count
= 2;
10454 else if ((arm_insn_r
->decode
< 8) && ((opcode1
& 0x19) == 0x10))
10456 /* Miscellaneous instructions */
10458 if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
10459 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
10461 /* Handle BLX, branch and link/exchange. */
10462 if (9 == arm_insn_r
->opcode
)
10464 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10465 and R14 stores the return address. */
10466 record_buf
[0] = ARM_PS_REGNUM
;
10467 record_buf
[1] = ARM_LR_REGNUM
;
10468 arm_insn_r
->reg_rec_count
= 2;
10471 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
10473 /* Handle enhanced software breakpoint insn, BKPT. */
10474 /* CPSR is changed to be executed in ARM state, disabling normal
10475 interrupts, entering abort mode. */
10476 /* According to high vector configuration PC is set. */
10477 /* user hit breakpoint and type reverse, in
10478 that case, we need to go back with previous CPSR and
10479 Program Counter. */
10480 record_buf
[0] = ARM_PS_REGNUM
;
10481 record_buf
[1] = ARM_LR_REGNUM
;
10482 arm_insn_r
->reg_rec_count
= 2;
10484 /* Save SPSR also; how? */
10487 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
10488 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
10490 /* Handle BX, branch and link/exchange. */
10491 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10492 record_buf
[0] = ARM_PS_REGNUM
;
10493 arm_insn_r
->reg_rec_count
= 1;
10495 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
10496 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
10497 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
10499 /* Count leading zeros: CLZ. */
10500 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10501 arm_insn_r
->reg_rec_count
= 1;
10503 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
10504 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10505 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
10506 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0))
10508 /* Handle MRS insn. */
10509 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10510 arm_insn_r
->reg_rec_count
= 1;
10513 else if (9 == arm_insn_r
->decode
&& opcode1
< 0x10)
10515 /* Multiply and multiply-accumulate */
10517 /* Handle multiply instructions. */
10518 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10519 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
10521 /* Handle MLA and MUL. */
10522 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10523 record_buf
[1] = ARM_PS_REGNUM
;
10524 arm_insn_r
->reg_rec_count
= 2;
10526 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
10528 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10529 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10530 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10531 record_buf
[2] = ARM_PS_REGNUM
;
10532 arm_insn_r
->reg_rec_count
= 3;
10535 else if (9 == arm_insn_r
->decode
&& opcode1
> 0x10)
10537 /* Synchronization primitives */
10539 /* Handling SWP, SWPB. */
10540 /* These insn, changes register and memory as well. */
10541 /* SWP or SWPB insn. */
10543 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10544 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10545 /* SWP insn ?, swaps word. */
10546 if (8 == arm_insn_r
->opcode
)
10548 record_buf_mem
[0] = 4;
10552 /* SWPB insn, swaps only byte. */
10553 record_buf_mem
[0] = 1;
10555 record_buf_mem
[1] = u_regval
[0];
10556 arm_insn_r
->mem_rec_count
= 1;
10557 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10558 arm_insn_r
->reg_rec_count
= 1;
10560 else if (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
10561 || 15 == arm_insn_r
->decode
)
10563 if ((opcode1
& 0x12) == 2)
10565 /* Extra load/store (unprivileged) */
10570 /* Extra load/store */
10571 switch (bits (arm_insn_r
->arm_insn
, 5, 6))
10574 if ((opcode1
& 0x05) == 0x0 || (opcode1
& 0x05) == 0x4)
10576 /* STRH (register), STRH (immediate) */
10577 arm_record_strx (arm_insn_r
, &record_buf
[0],
10578 &record_buf_mem
[0], ARM_RECORD_STRH
);
10580 else if ((opcode1
& 0x05) == 0x1)
10582 /* LDRH (register) */
10583 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10584 arm_insn_r
->reg_rec_count
= 1;
10586 if (bit (arm_insn_r
->arm_insn
, 21))
10588 /* Write back to Rn. */
10589 record_buf
[arm_insn_r
->reg_rec_count
++]
10590 = bits (arm_insn_r
->arm_insn
, 16, 19);
10593 else if ((opcode1
& 0x05) == 0x5)
10595 /* LDRH (immediate), LDRH (literal) */
10596 int rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
10598 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10599 arm_insn_r
->reg_rec_count
= 1;
10603 /*LDRH (immediate) */
10604 if (bit (arm_insn_r
->arm_insn
, 21))
10606 /* Write back to Rn. */
10607 record_buf
[arm_insn_r
->reg_rec_count
++] = rn
;
10615 if ((opcode1
& 0x05) == 0x0)
10617 /* LDRD (register) */
10618 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10619 record_buf
[1] = record_buf
[0] + 1;
10620 arm_insn_r
->reg_rec_count
= 2;
10622 if (bit (arm_insn_r
->arm_insn
, 21))
10624 /* Write back to Rn. */
10625 record_buf
[arm_insn_r
->reg_rec_count
++]
10626 = bits (arm_insn_r
->arm_insn
, 16, 19);
10629 else if ((opcode1
& 0x05) == 0x1)
10631 /* LDRSB (register) */
10632 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10633 arm_insn_r
->reg_rec_count
= 1;
10635 if (bit (arm_insn_r
->arm_insn
, 21))
10637 /* Write back to Rn. */
10638 record_buf
[arm_insn_r
->reg_rec_count
++]
10639 = bits (arm_insn_r
->arm_insn
, 16, 19);
10642 else if ((opcode1
& 0x05) == 0x4 || (opcode1
& 0x05) == 0x5)
10644 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10646 int rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
10648 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10649 arm_insn_r
->reg_rec_count
= 1;
10653 /*LDRD (immediate), LDRSB (immediate) */
10654 if (bit (arm_insn_r
->arm_insn
, 21))
10656 /* Write back to Rn. */
10657 record_buf
[arm_insn_r
->reg_rec_count
++] = rn
;
10665 if ((opcode1
& 0x05) == 0x0)
10667 /* STRD (register) */
10668 arm_record_strx (arm_insn_r
, &record_buf
[0],
10669 &record_buf_mem
[0], ARM_RECORD_STRD
);
10671 else if ((opcode1
& 0x05) == 0x1)
10673 /* LDRSH (register) */
10674 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10675 arm_insn_r
->reg_rec_count
= 1;
10677 if (bit (arm_insn_r
->arm_insn
, 21))
10679 /* Write back to Rn. */
10680 record_buf
[arm_insn_r
->reg_rec_count
++]
10681 = bits (arm_insn_r
->arm_insn
, 16, 19);
10684 else if ((opcode1
& 0x05) == 0x4)
10686 /* STRD (immediate) */
10687 arm_record_strx (arm_insn_r
, &record_buf
[0],
10688 &record_buf_mem
[0], ARM_RECORD_STRD
);
10690 else if ((opcode1
& 0x05) == 0x5)
10692 /* LDRSH (immediate), LDRSH (literal) */
10693 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10694 arm_insn_r
->reg_rec_count
= 1;
10696 if (bit (arm_insn_r
->arm_insn
, 21))
10698 /* Write back to Rn. */
10699 record_buf
[arm_insn_r
->reg_rec_count
++]
10700 = bits (arm_insn_r
->arm_insn
, 16, 19);
10716 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10717 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10721 /* Handling opcode 001 insns. */
10724 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
10726 uint32_t record_buf
[8], record_buf_mem
[8];
10728 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10729 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10731 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
10732 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
10733 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
10736 /* Handle MSR insn. */
10737 if (9 == arm_insn_r
->opcode
)
10739 /* CSPR is going to be changed. */
10740 record_buf
[0] = ARM_PS_REGNUM
;
10741 arm_insn_r
->reg_rec_count
= 1;
10745 /* SPSR is going to be changed. */
10748 else if (arm_insn_r
->opcode
<= 15)
10750 /* Normal data processing insns. */
10751 /* Out of 11 shifter operands mode, all the insn modifies destination
10752 register, which is specified by 13-16 decode. */
10753 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10754 record_buf
[1] = ARM_PS_REGNUM
;
10755 arm_insn_r
->reg_rec_count
= 2;
10762 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10763 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10768 arm_record_media (insn_decode_record
*arm_insn_r
)
10770 uint32_t record_buf
[8];
10772 switch (bits (arm_insn_r
->arm_insn
, 22, 24))
10775 /* Parallel addition and subtraction, signed */
10777 /* Parallel addition and subtraction, unsigned */
10780 /* Packing, unpacking, saturation and reversal */
10782 int rd
= bits (arm_insn_r
->arm_insn
, 12, 15);
10784 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10790 /* Signed multiplies */
10792 int rd
= bits (arm_insn_r
->arm_insn
, 16, 19);
10793 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 22);
10795 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10797 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10798 else if (op1
== 0x4)
10799 record_buf
[arm_insn_r
->reg_rec_count
++]
10800 = bits (arm_insn_r
->arm_insn
, 12, 15);
10806 if (bit (arm_insn_r
->arm_insn
, 21)
10807 && bits (arm_insn_r
->arm_insn
, 5, 6) == 0x2)
10810 record_buf
[arm_insn_r
->reg_rec_count
++]
10811 = bits (arm_insn_r
->arm_insn
, 12, 15);
10813 else if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x0
10814 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x0)
10816 /* USAD8 and USADA8 */
10817 record_buf
[arm_insn_r
->reg_rec_count
++]
10818 = bits (arm_insn_r
->arm_insn
, 16, 19);
10825 if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x3
10826 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x7)
10828 /* Permanently UNDEFINED */
10833 /* BFC, BFI and UBFX */
10834 record_buf
[arm_insn_r
->reg_rec_count
++]
10835 = bits (arm_insn_r
->arm_insn
, 12, 15);
10844 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10849 /* Handle ARM mode instructions with opcode 010. */
10852 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
10854 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10856 uint32_t reg_base
, reg_dest
;
10857 uint32_t offset_12
, tgt_mem_addr
;
10858 uint32_t record_buf
[8], record_buf_mem
[8];
10859 unsigned char wback
;
10862 /* Calculate wback. */
10863 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
10864 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
10866 arm_insn_r
->reg_rec_count
= 0;
10867 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10869 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10871 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10874 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10875 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
10877 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10878 preceeds a LDR instruction having R15 as reg_base, it
10879 emulates a branch and link instruction, and hence we need to save
10880 CPSR and PC as well. */
10881 if (ARM_PC_REGNUM
== reg_dest
)
10882 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10884 /* If wback is true, also save the base register, which is going to be
10887 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10891 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10893 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
10894 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10896 /* Handle bit U. */
10897 if (bit (arm_insn_r
->arm_insn
, 23))
10899 /* U == 1: Add the offset. */
10900 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
10904 /* U == 0: subtract the offset. */
10905 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
10908 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10910 if (bit (arm_insn_r
->arm_insn
, 22))
10912 /* STRB and STRBT: 1 byte. */
10913 record_buf_mem
[0] = 1;
10917 /* STR and STRT: 4 bytes. */
10918 record_buf_mem
[0] = 4;
10921 /* Handle bit P. */
10922 if (bit (arm_insn_r
->arm_insn
, 24))
10923 record_buf_mem
[1] = tgt_mem_addr
;
10925 record_buf_mem
[1] = (uint32_t) u_regval
;
10927 arm_insn_r
->mem_rec_count
= 1;
10929 /* If wback is true, also save the base register, which is going to be
10932 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10935 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10936 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10940 /* Handling opcode 011 insns. */
10943 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
10945 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10947 uint32_t shift_imm
= 0;
10948 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
10949 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
10950 uint32_t record_buf
[8], record_buf_mem
[8];
10953 ULONGEST u_regval
[2];
10955 if (bit (arm_insn_r
->arm_insn
, 4))
10956 return arm_record_media (arm_insn_r
);
10958 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10959 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10961 /* Handle enhanced store insns and LDRD DSP insn,
10962 order begins according to addressing modes for store insns
10966 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10968 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10969 /* LDR insn has a capability to do branching, if
10970 MOV LR, PC is preceded by LDR insn having Rn as R15
10971 in that case, it emulates branch and link insn, and hence we
10972 need to save CSPR and PC as well. */
10973 if (15 != reg_dest
)
10975 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10976 arm_insn_r
->reg_rec_count
= 1;
10980 record_buf
[0] = reg_dest
;
10981 record_buf
[1] = ARM_PS_REGNUM
;
10982 arm_insn_r
->reg_rec_count
= 2;
10987 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
10989 /* Store insn, register offset and register pre-indexed,
10990 register post-indexed. */
10992 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10994 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10995 regcache_raw_read_unsigned (reg_cache
, reg_src1
10997 regcache_raw_read_unsigned (reg_cache
, reg_src2
10999 if (15 == reg_src2
)
11001 /* If R15 was used as Rn, hence current PC+8. */
11002 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11003 u_regval
[0] = u_regval
[0] + 8;
11005 /* Calculate target store address, Rn +/- Rm, register offset. */
11007 if (bit (arm_insn_r
->arm_insn
, 23))
11009 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
11013 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
11016 switch (arm_insn_r
->opcode
)
11030 record_buf_mem
[0] = 4;
11045 record_buf_mem
[0] = 1;
11049 gdb_assert_not_reached ("no decoding pattern found");
11052 record_buf_mem
[1] = tgt_mem_addr
;
11053 arm_insn_r
->mem_rec_count
= 1;
11055 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11056 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11057 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11058 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11059 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11060 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11063 /* Rn is going to be changed in pre-indexed mode and
11064 post-indexed mode as well. */
11065 record_buf
[0] = reg_src2
;
11066 arm_insn_r
->reg_rec_count
= 1;
11071 /* Store insn, scaled register offset; scaled pre-indexed. */
11072 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
11074 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11076 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11077 /* Get shift_imm. */
11078 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
11079 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11080 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
11081 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11082 /* Offset_12 used as shift. */
11086 /* Offset_12 used as index. */
11087 offset_12
= u_regval
[0] << shift_imm
;
11091 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
11097 if (bit (u_regval
[0], 31))
11099 offset_12
= 0xFFFFFFFF;
11108 /* This is arithmetic shift. */
11109 offset_12
= s_word
>> shift_imm
;
11116 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
11118 /* Get C flag value and shift it by 31. */
11119 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
11120 | (u_regval
[0]) >> 1);
11124 offset_12
= (u_regval
[0] >> shift_imm
) \
11126 (sizeof(uint32_t) - shift_imm
));
11131 gdb_assert_not_reached ("no decoding pattern found");
11135 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11137 if (bit (arm_insn_r
->arm_insn
, 23))
11139 tgt_mem_addr
= u_regval
[1] + offset_12
;
11143 tgt_mem_addr
= u_regval
[1] - offset_12
;
11146 switch (arm_insn_r
->opcode
)
11160 record_buf_mem
[0] = 4;
11175 record_buf_mem
[0] = 1;
11179 gdb_assert_not_reached ("no decoding pattern found");
11182 record_buf_mem
[1] = tgt_mem_addr
;
11183 arm_insn_r
->mem_rec_count
= 1;
11185 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11186 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11187 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11188 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11189 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11190 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11193 /* Rn is going to be changed in register scaled pre-indexed
11194 mode,and scaled post indexed mode. */
11195 record_buf
[0] = reg_src2
;
11196 arm_insn_r
->reg_rec_count
= 1;
11201 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11202 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11206 /* Handle ARM mode instructions with opcode 100. */
11209 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
11211 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11212 uint32_t register_count
= 0, register_bits
;
11213 uint32_t reg_base
, addr_mode
;
11214 uint32_t record_buf
[24], record_buf_mem
[48];
11218 /* Fetch the list of registers. */
11219 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
11220 arm_insn_r
->reg_rec_count
= 0;
11222 /* Fetch the base register that contains the address we are loading data
11224 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11226 /* Calculate wback. */
11227 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
11229 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11231 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11233 /* Find out which registers are going to be loaded from memory. */
11234 while (register_bits
)
11236 if (register_bits
& 0x00000001)
11237 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
11238 register_bits
= register_bits
>> 1;
11243 /* If wback is true, also save the base register, which is going to be
11246 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11248 /* Save the CPSR register. */
11249 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11253 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11255 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
11257 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11259 /* Find out how many registers are going to be stored to memory. */
11260 while (register_bits
)
11262 if (register_bits
& 0x00000001)
11264 register_bits
= register_bits
>> 1;
11269 /* STMDA (STMED): Decrement after. */
11271 record_buf_mem
[1] = (uint32_t) u_regval
11272 - register_count
* ARM_INT_REGISTER_SIZE
+ 4;
11274 /* STM (STMIA, STMEA): Increment after. */
11276 record_buf_mem
[1] = (uint32_t) u_regval
;
11278 /* STMDB (STMFD): Decrement before. */
11280 record_buf_mem
[1] = (uint32_t) u_regval
11281 - register_count
* ARM_INT_REGISTER_SIZE
;
11283 /* STMIB (STMFA): Increment before. */
11285 record_buf_mem
[1] = (uint32_t) u_regval
+ ARM_INT_REGISTER_SIZE
;
11288 gdb_assert_not_reached ("no decoding pattern found");
11292 record_buf_mem
[0] = register_count
* ARM_INT_REGISTER_SIZE
;
11293 arm_insn_r
->mem_rec_count
= 1;
11295 /* If wback is true, also save the base register, which is going to be
11298 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11301 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11302 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11306 /* Handling opcode 101 insns. */
11309 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11311 uint32_t record_buf
[8];
11313 /* Handle B, BL, BLX(1) insns. */
11314 /* B simply branches so we do nothing here. */
11315 /* Note: BLX(1) doesnt fall here but instead it falls into
11316 extension space. */
11317 if (bit (arm_insn_r
->arm_insn
, 24))
11319 record_buf
[0] = ARM_LR_REGNUM
;
11320 arm_insn_r
->reg_rec_count
= 1;
11323 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11329 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11331 fprintf_unfiltered (gdb_stderr
,
11332 _("Process record does not support instruction "
11333 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11334 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11339 /* Record handler for vector data transfer instructions. */
11342 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11344 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11345 uint32_t record_buf
[4];
11347 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11348 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11349 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11350 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11351 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11353 /* Handle VMOV instruction. */
11354 if (bit_l
&& bit_c
)
11356 record_buf
[0] = reg_t
;
11357 arm_insn_r
->reg_rec_count
= 1;
11359 else if (bit_l
&& !bit_c
)
11361 /* Handle VMOV instruction. */
11362 if (bits_a
== 0x00)
11364 record_buf
[0] = reg_t
;
11365 arm_insn_r
->reg_rec_count
= 1;
11367 /* Handle VMRS instruction. */
11368 else if (bits_a
== 0x07)
11371 reg_t
= ARM_PS_REGNUM
;
11373 record_buf
[0] = reg_t
;
11374 arm_insn_r
->reg_rec_count
= 1;
11377 else if (!bit_l
&& !bit_c
)
11379 /* Handle VMOV instruction. */
11380 if (bits_a
== 0x00)
11382 record_buf
[0] = ARM_D0_REGNUM
+ reg_v
;
11384 arm_insn_r
->reg_rec_count
= 1;
11386 /* Handle VMSR instruction. */
11387 else if (bits_a
== 0x07)
11389 record_buf
[0] = ARM_FPSCR_REGNUM
;
11390 arm_insn_r
->reg_rec_count
= 1;
11393 else if (!bit_l
&& bit_c
)
11395 /* Handle VMOV instruction. */
11396 if (!(bits_a
& 0x04))
11398 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
11400 arm_insn_r
->reg_rec_count
= 1;
11402 /* Handle VDUP instruction. */
11405 if (bit (arm_insn_r
->arm_insn
, 21))
11407 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11408 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11409 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
11410 arm_insn_r
->reg_rec_count
= 2;
11414 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11415 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11416 arm_insn_r
->reg_rec_count
= 1;
11421 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11425 /* Record handler for extension register load/store instructions. */
11428 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
11430 uint32_t opcode
, single_reg
;
11431 uint8_t op_vldm_vstm
;
11432 uint32_t record_buf
[8], record_buf_mem
[128];
11433 ULONGEST u_regval
= 0;
11435 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11437 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
11438 single_reg
= !bit (arm_insn_r
->arm_insn
, 8);
11439 op_vldm_vstm
= opcode
& 0x1b;
11441 /* Handle VMOV instructions. */
11442 if ((opcode
& 0x1e) == 0x04)
11444 if (bit (arm_insn_r
->arm_insn
, 20)) /* to_arm_registers bit 20? */
11446 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11447 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11448 arm_insn_r
->reg_rec_count
= 2;
11452 uint8_t reg_m
= bits (arm_insn_r
->arm_insn
, 0, 3);
11453 uint8_t bit_m
= bit (arm_insn_r
->arm_insn
, 5);
11457 /* The first S register number m is REG_M:M (M is bit 5),
11458 the corresponding D register number is REG_M:M / 2, which
11460 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_D0_REGNUM
+ reg_m
;
11461 /* The second S register number is REG_M:M + 1, the
11462 corresponding D register number is (REG_M:M + 1) / 2.
11463 IOW, if bit M is 1, the first and second S registers
11464 are mapped to different D registers, otherwise, they are
11465 in the same D register. */
11468 record_buf
[arm_insn_r
->reg_rec_count
++]
11469 = ARM_D0_REGNUM
+ reg_m
+ 1;
11474 record_buf
[0] = ((bit_m
<< 4) + reg_m
+ ARM_D0_REGNUM
);
11475 arm_insn_r
->reg_rec_count
= 1;
11479 /* Handle VSTM and VPUSH instructions. */
11480 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
11481 || op_vldm_vstm
== 0x12)
11483 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
11484 uint32_t memory_index
= 0;
11486 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11487 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11488 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11489 imm_off32
= imm_off8
<< 2;
11490 memory_count
= imm_off8
;
11492 if (bit (arm_insn_r
->arm_insn
, 23))
11493 start_address
= u_regval
;
11495 start_address
= u_regval
- imm_off32
;
11497 if (bit (arm_insn_r
->arm_insn
, 21))
11499 record_buf
[0] = reg_rn
;
11500 arm_insn_r
->reg_rec_count
= 1;
11503 while (memory_count
> 0)
11507 record_buf_mem
[memory_index
] = 4;
11508 record_buf_mem
[memory_index
+ 1] = start_address
;
11509 start_address
= start_address
+ 4;
11510 memory_index
= memory_index
+ 2;
11514 record_buf_mem
[memory_index
] = 4;
11515 record_buf_mem
[memory_index
+ 1] = start_address
;
11516 record_buf_mem
[memory_index
+ 2] = 4;
11517 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11518 start_address
= start_address
+ 8;
11519 memory_index
= memory_index
+ 4;
11523 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
11525 /* Handle VLDM instructions. */
11526 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
11527 || op_vldm_vstm
== 0x13)
11529 uint32_t reg_count
, reg_vd
;
11530 uint32_t reg_index
= 0;
11531 uint32_t bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11533 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11534 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
11536 /* REG_VD is the first D register number. If the instruction
11537 loads memory to S registers (SINGLE_REG is TRUE), the register
11538 number is (REG_VD << 1 | bit D), so the corresponding D
11539 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11541 reg_vd
= reg_vd
| (bit_d
<< 4);
11543 if (bit (arm_insn_r
->arm_insn
, 21) /* write back */)
11544 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
11546 /* If the instruction loads memory to D register, REG_COUNT should
11547 be divided by 2, according to the ARM Architecture Reference
11548 Manual. If the instruction loads memory to S register, divide by
11549 2 as well because two S registers are mapped to D register. */
11550 reg_count
= reg_count
/ 2;
11551 if (single_reg
&& bit_d
)
11553 /* Increase the register count if S register list starts from
11554 an odd number (bit d is one). */
11558 while (reg_count
> 0)
11560 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
11563 arm_insn_r
->reg_rec_count
= reg_index
;
11565 /* VSTR Vector store register. */
11566 else if ((opcode
& 0x13) == 0x10)
11568 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
;
11569 uint32_t memory_index
= 0;
11571 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11572 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11573 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11574 imm_off32
= imm_off8
<< 2;
11576 if (bit (arm_insn_r
->arm_insn
, 23))
11577 start_address
= u_regval
+ imm_off32
;
11579 start_address
= u_regval
- imm_off32
;
11583 record_buf_mem
[memory_index
] = 4;
11584 record_buf_mem
[memory_index
+ 1] = start_address
;
11585 arm_insn_r
->mem_rec_count
= 1;
11589 record_buf_mem
[memory_index
] = 4;
11590 record_buf_mem
[memory_index
+ 1] = start_address
;
11591 record_buf_mem
[memory_index
+ 2] = 4;
11592 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11593 arm_insn_r
->mem_rec_count
= 2;
11596 /* VLDR Vector load register. */
11597 else if ((opcode
& 0x13) == 0x11)
11599 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11603 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
11604 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
11608 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
11609 /* Record register D rather than pseudo register S. */
11610 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
/ 2;
11612 arm_insn_r
->reg_rec_count
= 1;
11615 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11616 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11620 /* Record handler for arm/thumb mode VFP data processing instructions. */
11623 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
11625 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
11626 uint32_t record_buf
[4];
11627 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
11628 enum insn_types curr_insn_type
= INSN_INV
;
11630 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11631 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11632 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11633 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
11634 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
11635 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11636 /* Mask off the "D" bit. */
11637 opc1
= opc1
& ~0x04;
11639 /* Handle VMLA, VMLS. */
11642 if (bit (arm_insn_r
->arm_insn
, 10))
11644 if (bit (arm_insn_r
->arm_insn
, 6))
11645 curr_insn_type
= INSN_T0
;
11647 curr_insn_type
= INSN_T1
;
11652 curr_insn_type
= INSN_T1
;
11654 curr_insn_type
= INSN_T2
;
11657 /* Handle VNMLA, VNMLS, VNMUL. */
11658 else if (opc1
== 0x01)
11661 curr_insn_type
= INSN_T1
;
11663 curr_insn_type
= INSN_T2
;
11666 else if (opc1
== 0x02 && !(opc3
& 0x01))
11668 if (bit (arm_insn_r
->arm_insn
, 10))
11670 if (bit (arm_insn_r
->arm_insn
, 6))
11671 curr_insn_type
= INSN_T0
;
11673 curr_insn_type
= INSN_T1
;
11678 curr_insn_type
= INSN_T1
;
11680 curr_insn_type
= INSN_T2
;
11683 /* Handle VADD, VSUB. */
11684 else if (opc1
== 0x03)
11686 if (!bit (arm_insn_r
->arm_insn
, 9))
11688 if (bit (arm_insn_r
->arm_insn
, 6))
11689 curr_insn_type
= INSN_T0
;
11691 curr_insn_type
= INSN_T1
;
11696 curr_insn_type
= INSN_T1
;
11698 curr_insn_type
= INSN_T2
;
11702 else if (opc1
== 0x08)
11705 curr_insn_type
= INSN_T1
;
11707 curr_insn_type
= INSN_T2
;
11709 /* Handle all other vfp data processing instructions. */
11710 else if (opc1
== 0x0b)
11713 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
11715 if (bit (arm_insn_r
->arm_insn
, 4))
11717 if (bit (arm_insn_r
->arm_insn
, 6))
11718 curr_insn_type
= INSN_T0
;
11720 curr_insn_type
= INSN_T1
;
11725 curr_insn_type
= INSN_T1
;
11727 curr_insn_type
= INSN_T2
;
11730 /* Handle VNEG and VABS. */
11731 else if ((opc2
== 0x01 && opc3
== 0x01)
11732 || (opc2
== 0x00 && opc3
== 0x03))
11734 if (!bit (arm_insn_r
->arm_insn
, 11))
11736 if (bit (arm_insn_r
->arm_insn
, 6))
11737 curr_insn_type
= INSN_T0
;
11739 curr_insn_type
= INSN_T1
;
11744 curr_insn_type
= INSN_T1
;
11746 curr_insn_type
= INSN_T2
;
11749 /* Handle VSQRT. */
11750 else if (opc2
== 0x01 && opc3
== 0x03)
11753 curr_insn_type
= INSN_T1
;
11755 curr_insn_type
= INSN_T2
;
11758 else if (opc2
== 0x07 && opc3
== 0x03)
11761 curr_insn_type
= INSN_T1
;
11763 curr_insn_type
= INSN_T2
;
11765 else if (opc3
& 0x01)
11768 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
11770 if (!bit (arm_insn_r
->arm_insn
, 18))
11771 curr_insn_type
= INSN_T2
;
11775 curr_insn_type
= INSN_T1
;
11777 curr_insn_type
= INSN_T2
;
11781 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
11784 curr_insn_type
= INSN_T1
;
11786 curr_insn_type
= INSN_T2
;
11788 /* Handle VCVTB, VCVTT. */
11789 else if ((opc2
& 0x0e) == 0x02)
11790 curr_insn_type
= INSN_T2
;
11791 /* Handle VCMP, VCMPE. */
11792 else if ((opc2
& 0x0e) == 0x04)
11793 curr_insn_type
= INSN_T3
;
11797 switch (curr_insn_type
)
11800 reg_vd
= reg_vd
| (bit_d
<< 4);
11801 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11802 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
11803 arm_insn_r
->reg_rec_count
= 2;
11807 reg_vd
= reg_vd
| (bit_d
<< 4);
11808 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11809 arm_insn_r
->reg_rec_count
= 1;
11813 reg_vd
= (reg_vd
<< 1) | bit_d
;
11814 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11815 arm_insn_r
->reg_rec_count
= 1;
11819 record_buf
[0] = ARM_FPSCR_REGNUM
;
11820 arm_insn_r
->reg_rec_count
= 1;
11824 gdb_assert_not_reached ("no decoding pattern found");
11828 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11832 /* Handling opcode 110 insns. */
11835 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
11837 uint32_t op1
, op1_ebit
, coproc
;
11839 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11840 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11841 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11843 if ((coproc
& 0x0e) == 0x0a)
11845 /* Handle extension register ld/st instructions. */
11847 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11849 /* 64-bit transfers between arm core and extension registers. */
11850 if ((op1
& 0x3e) == 0x04)
11851 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11855 /* Handle coprocessor ld/st instructions. */
11860 return arm_record_unsupported_insn (arm_insn_r
);
11863 return arm_record_unsupported_insn (arm_insn_r
);
11866 /* Move to coprocessor from two arm core registers. */
11868 return arm_record_unsupported_insn (arm_insn_r
);
11870 /* Move to two arm core registers from coprocessor. */
11875 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11876 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11877 arm_insn_r
->reg_rec_count
= 2;
11879 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
11883 return arm_record_unsupported_insn (arm_insn_r
);
11886 /* Handling opcode 111 insns. */
11889 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
11891 uint32_t op
, op1_ebit
, coproc
, bits_24_25
;
11892 arm_gdbarch_tdep
*tdep
11893 = (arm_gdbarch_tdep
*) gdbarch_tdep (arm_insn_r
->gdbarch
);
11894 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11896 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
11897 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11898 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11899 op
= bit (arm_insn_r
->arm_insn
, 4);
11900 bits_24_25
= bits (arm_insn_r
->arm_insn
, 24, 25);
11902 /* Handle arm SWI/SVC system call instructions. */
11903 if (bits_24_25
== 0x3)
11905 if (tdep
->arm_syscall_record
!= NULL
)
11907 ULONGEST svc_operand
, svc_number
;
11909 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
11911 if (svc_operand
) /* OABI. */
11912 svc_number
= svc_operand
- 0x900000;
11914 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
11916 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
11920 fprintf_unfiltered (gdb_stderr
, _("no syscall record support\n"));
11924 else if (bits_24_25
== 0x02)
11928 if ((coproc
& 0x0e) == 0x0a)
11930 /* 8, 16, and 32-bit transfer */
11931 return arm_record_vdata_transfer_insn (arm_insn_r
);
11938 uint32_t record_buf
[1];
11940 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11941 if (record_buf
[0] == 15)
11942 record_buf
[0] = ARM_PS_REGNUM
;
11944 arm_insn_r
->reg_rec_count
= 1;
11945 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
11958 if ((coproc
& 0x0e) == 0x0a)
11960 /* VFP data-processing instructions. */
11961 return arm_record_vfp_data_proc_insn (arm_insn_r
);
11972 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11976 if ((coproc
& 0x0e) != 0x0a)
11982 else if (op1
== 4 || op1
== 5)
11984 if ((coproc
& 0x0e) == 0x0a)
11986 /* 64-bit transfers between ARM core and extension */
11995 else if (op1
== 0 || op1
== 1)
12002 if ((coproc
& 0x0e) == 0x0a)
12004 /* Extension register load/store */
12008 /* STC, STC2, LDC, LDC2 */
12017 /* Handling opcode 000 insns. */
12020 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
12022 uint32_t record_buf
[8];
12023 uint32_t reg_src1
= 0;
12025 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12027 record_buf
[0] = ARM_PS_REGNUM
;
12028 record_buf
[1] = reg_src1
;
12029 thumb_insn_r
->reg_rec_count
= 2;
12031 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12037 /* Handling opcode 001 insns. */
12040 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
12042 uint32_t record_buf
[8];
12043 uint32_t reg_src1
= 0;
12045 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12047 record_buf
[0] = ARM_PS_REGNUM
;
12048 record_buf
[1] = reg_src1
;
12049 thumb_insn_r
->reg_rec_count
= 2;
12051 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12056 /* Handling opcode 010 insns. */
12059 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
12061 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12062 uint32_t record_buf
[8], record_buf_mem
[8];
12064 uint32_t reg_src1
= 0, reg_src2
= 0;
12065 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
12067 ULONGEST u_regval
[2] = {0};
12069 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
12071 if (bit (thumb_insn_r
->arm_insn
, 12))
12073 /* Handle load/store register offset. */
12074 uint32_t opB
= bits (thumb_insn_r
->arm_insn
, 9, 11);
12076 if (in_inclusive_range (opB
, 4U, 7U))
12078 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12079 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
12080 record_buf
[0] = reg_src1
;
12081 thumb_insn_r
->reg_rec_count
= 1;
12083 else if (in_inclusive_range (opB
, 0U, 2U))
12085 /* STR(2), STRB(2), STRH(2) . */
12086 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12087 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
12088 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
12089 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
12091 record_buf_mem
[0] = 4; /* STR (2). */
12093 record_buf_mem
[0] = 1; /* STRB (2). */
12095 record_buf_mem
[0] = 2; /* STRH (2). */
12096 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
12097 thumb_insn_r
->mem_rec_count
= 1;
12100 else if (bit (thumb_insn_r
->arm_insn
, 11))
12102 /* Handle load from literal pool. */
12104 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12105 record_buf
[0] = reg_src1
;
12106 thumb_insn_r
->reg_rec_count
= 1;
12110 /* Special data instructions and branch and exchange */
12111 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
12112 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12113 if ((3 == opcode2
) && (!opcode3
))
12115 /* Branch with exchange. */
12116 record_buf
[0] = ARM_PS_REGNUM
;
12117 thumb_insn_r
->reg_rec_count
= 1;
12121 /* Format 8; special data processing insns. */
12122 record_buf
[0] = ARM_PS_REGNUM
;
12123 record_buf
[1] = (bit (thumb_insn_r
->arm_insn
, 7) << 3
12124 | bits (thumb_insn_r
->arm_insn
, 0, 2));
12125 thumb_insn_r
->reg_rec_count
= 2;
12130 /* Format 5; data processing insns. */
12131 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12132 if (bit (thumb_insn_r
->arm_insn
, 7))
12134 reg_src1
= reg_src1
+ 8;
12136 record_buf
[0] = ARM_PS_REGNUM
;
12137 record_buf
[1] = reg_src1
;
12138 thumb_insn_r
->reg_rec_count
= 2;
12141 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12142 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12148 /* Handling opcode 001 insns. */
12151 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
12153 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12154 uint32_t record_buf
[8], record_buf_mem
[8];
12156 uint32_t reg_src1
= 0;
12157 uint32_t opcode
= 0, immed_5
= 0;
12159 ULONGEST u_regval
= 0;
12161 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12166 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12167 record_buf
[0] = reg_src1
;
12168 thumb_insn_r
->reg_rec_count
= 1;
12173 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12174 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12175 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12176 record_buf_mem
[0] = 4;
12177 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
12178 thumb_insn_r
->mem_rec_count
= 1;
12181 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12182 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12188 /* Handling opcode 100 insns. */
12191 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
12193 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12194 uint32_t record_buf
[8], record_buf_mem
[8];
12196 uint32_t reg_src1
= 0;
12197 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
12199 ULONGEST u_regval
= 0;
12201 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12206 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12207 record_buf
[0] = reg_src1
;
12208 thumb_insn_r
->reg_rec_count
= 1;
12210 else if (1 == opcode
)
12213 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12214 record_buf
[0] = reg_src1
;
12215 thumb_insn_r
->reg_rec_count
= 1;
12217 else if (2 == opcode
)
12220 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12221 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12222 record_buf_mem
[0] = 4;
12223 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
12224 thumb_insn_r
->mem_rec_count
= 1;
12226 else if (0 == opcode
)
12229 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12230 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12231 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12232 record_buf_mem
[0] = 2;
12233 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
12234 thumb_insn_r
->mem_rec_count
= 1;
12237 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12238 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12244 /* Handling opcode 101 insns. */
12247 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12249 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12251 uint32_t opcode
= 0;
12252 uint32_t register_bits
= 0, register_count
= 0;
12253 uint32_t index
= 0, start_address
= 0;
12254 uint32_t record_buf
[24], record_buf_mem
[48];
12257 ULONGEST u_regval
= 0;
12259 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12261 if (opcode
== 0 || opcode
== 1)
12263 /* ADR and ADD (SP plus immediate) */
12265 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12266 record_buf
[0] = reg_src1
;
12267 thumb_insn_r
->reg_rec_count
= 1;
12271 /* Miscellaneous 16-bit instructions */
12272 uint32_t opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 11);
12277 /* SETEND and CPS */
12280 /* ADD/SUB (SP plus immediate) */
12281 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12282 record_buf
[0] = ARM_SP_REGNUM
;
12283 thumb_insn_r
->reg_rec_count
= 1;
12285 case 1: /* fall through */
12286 case 3: /* fall through */
12287 case 9: /* fall through */
12292 /* SXTH, SXTB, UXTH, UXTB */
12293 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12294 thumb_insn_r
->reg_rec_count
= 1;
12296 case 4: /* fall through */
12299 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12300 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12301 while (register_bits
)
12303 if (register_bits
& 0x00000001)
12305 register_bits
= register_bits
>> 1;
12307 start_address
= u_regval
- \
12308 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12309 thumb_insn_r
->mem_rec_count
= register_count
;
12310 while (register_count
)
12312 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12313 record_buf_mem
[(register_count
* 2) - 2] = 4;
12314 start_address
= start_address
+ 4;
12317 record_buf
[0] = ARM_SP_REGNUM
;
12318 thumb_insn_r
->reg_rec_count
= 1;
12321 /* REV, REV16, REVSH */
12322 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12323 thumb_insn_r
->reg_rec_count
= 1;
12325 case 12: /* fall through */
12328 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12329 while (register_bits
)
12331 if (register_bits
& 0x00000001)
12332 record_buf
[index
++] = register_count
;
12333 register_bits
= register_bits
>> 1;
12336 record_buf
[index
++] = ARM_PS_REGNUM
;
12337 record_buf
[index
++] = ARM_SP_REGNUM
;
12338 thumb_insn_r
->reg_rec_count
= index
;
12342 /* Handle enhanced software breakpoint insn, BKPT. */
12343 /* CPSR is changed to be executed in ARM state, disabling normal
12344 interrupts, entering abort mode. */
12345 /* According to high vector configuration PC is set. */
12346 /* User hits breakpoint and type reverse, in that case, we need to go back with
12347 previous CPSR and Program Counter. */
12348 record_buf
[0] = ARM_PS_REGNUM
;
12349 record_buf
[1] = ARM_LR_REGNUM
;
12350 thumb_insn_r
->reg_rec_count
= 2;
12351 /* We need to save SPSR value, which is not yet done. */
12352 fprintf_unfiltered (gdb_stderr
,
12353 _("Process record does not support instruction "
12354 "0x%0x at address %s.\n"),
12355 thumb_insn_r
->arm_insn
,
12356 paddress (thumb_insn_r
->gdbarch
,
12357 thumb_insn_r
->this_addr
));
12361 /* If-Then, and hints */
12368 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12369 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12375 /* Handling opcode 110 insns. */
12378 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12380 arm_gdbarch_tdep
*tdep
12381 = (arm_gdbarch_tdep
*) gdbarch_tdep (thumb_insn_r
->gdbarch
);
12382 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12384 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12385 uint32_t reg_src1
= 0;
12386 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12387 uint32_t index
= 0, start_address
= 0;
12388 uint32_t record_buf
[24], record_buf_mem
[48];
12390 ULONGEST u_regval
= 0;
12392 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12393 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12399 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12401 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12402 while (register_bits
)
12404 if (register_bits
& 0x00000001)
12405 record_buf
[index
++] = register_count
;
12406 register_bits
= register_bits
>> 1;
12409 record_buf
[index
++] = reg_src1
;
12410 thumb_insn_r
->reg_rec_count
= index
;
12412 else if (0 == opcode2
)
12414 /* It handles both STMIA. */
12415 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12417 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12418 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12419 while (register_bits
)
12421 if (register_bits
& 0x00000001)
12423 register_bits
= register_bits
>> 1;
12425 start_address
= u_regval
;
12426 thumb_insn_r
->mem_rec_count
= register_count
;
12427 while (register_count
)
12429 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12430 record_buf_mem
[(register_count
* 2) - 2] = 4;
12431 start_address
= start_address
+ 4;
12435 else if (0x1F == opcode1
)
12437 /* Handle arm syscall insn. */
12438 if (tdep
->arm_syscall_record
!= NULL
)
12440 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12441 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12445 fprintf_unfiltered (gdb_stderr
, _("no syscall record support\n"));
12450 /* B (1), conditional branch is automatically taken care in process_record,
12451 as PC is saved there. */
12453 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12454 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12460 /* Handling opcode 111 insns. */
12463 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12465 uint32_t record_buf
[8];
12466 uint32_t bits_h
= 0;
12468 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12470 if (2 == bits_h
|| 3 == bits_h
)
12473 record_buf
[0] = ARM_LR_REGNUM
;
12474 thumb_insn_r
->reg_rec_count
= 1;
12476 else if (1 == bits_h
)
12479 record_buf
[0] = ARM_PS_REGNUM
;
12480 record_buf
[1] = ARM_LR_REGNUM
;
12481 thumb_insn_r
->reg_rec_count
= 2;
12484 /* B(2) is automatically taken care in process_record, as PC is
12487 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12492 /* Handler for thumb2 load/store multiple instructions. */
12495 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
12497 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12499 uint32_t reg_rn
, op
;
12500 uint32_t register_bits
= 0, register_count
= 0;
12501 uint32_t index
= 0, start_address
= 0;
12502 uint32_t record_buf
[24], record_buf_mem
[48];
12504 ULONGEST u_regval
= 0;
12506 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12507 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12509 if (0 == op
|| 3 == op
)
12511 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12513 /* Handle RFE instruction. */
12514 record_buf
[0] = ARM_PS_REGNUM
;
12515 thumb2_insn_r
->reg_rec_count
= 1;
12519 /* Handle SRS instruction after reading banked SP. */
12520 return arm_record_unsupported_insn (thumb2_insn_r
);
12523 else if (1 == op
|| 2 == op
)
12525 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12527 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12528 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12529 while (register_bits
)
12531 if (register_bits
& 0x00000001)
12532 record_buf
[index
++] = register_count
;
12535 register_bits
= register_bits
>> 1;
12537 record_buf
[index
++] = reg_rn
;
12538 record_buf
[index
++] = ARM_PS_REGNUM
;
12539 thumb2_insn_r
->reg_rec_count
= index
;
12543 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12544 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12545 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12546 while (register_bits
)
12548 if (register_bits
& 0x00000001)
12551 register_bits
= register_bits
>> 1;
12556 /* Start address calculation for LDMDB/LDMEA. */
12557 start_address
= u_regval
;
12561 /* Start address calculation for LDMDB/LDMEA. */
12562 start_address
= u_regval
- register_count
* 4;
12565 thumb2_insn_r
->mem_rec_count
= register_count
;
12566 while (register_count
)
12568 record_buf_mem
[register_count
* 2 - 1] = start_address
;
12569 record_buf_mem
[register_count
* 2 - 2] = 4;
12570 start_address
= start_address
+ 4;
12573 record_buf
[0] = reg_rn
;
12574 record_buf
[1] = ARM_PS_REGNUM
;
12575 thumb2_insn_r
->reg_rec_count
= 2;
12579 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12581 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12583 return ARM_RECORD_SUCCESS
;
12586 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12590 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
12592 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12594 uint32_t reg_rd
, reg_rn
, offset_imm
;
12595 uint32_t reg_dest1
, reg_dest2
;
12596 uint32_t address
, offset_addr
;
12597 uint32_t record_buf
[8], record_buf_mem
[8];
12598 uint32_t op1
, op2
, op3
;
12600 ULONGEST u_regval
[2];
12602 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12603 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
12604 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12606 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12608 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
12610 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12611 record_buf
[0] = reg_dest1
;
12612 record_buf
[1] = ARM_PS_REGNUM
;
12613 thumb2_insn_r
->reg_rec_count
= 2;
12616 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
12618 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12619 record_buf
[2] = reg_dest2
;
12620 thumb2_insn_r
->reg_rec_count
= 3;
12625 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12626 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12628 if (0 == op1
&& 0 == op2
)
12630 /* Handle STREX. */
12631 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12632 address
= u_regval
[0] + (offset_imm
* 4);
12633 record_buf_mem
[0] = 4;
12634 record_buf_mem
[1] = address
;
12635 thumb2_insn_r
->mem_rec_count
= 1;
12636 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12637 record_buf
[0] = reg_rd
;
12638 thumb2_insn_r
->reg_rec_count
= 1;
12640 else if (1 == op1
&& 0 == op2
)
12642 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12643 record_buf
[0] = reg_rd
;
12644 thumb2_insn_r
->reg_rec_count
= 1;
12645 address
= u_regval
[0];
12646 record_buf_mem
[1] = address
;
12650 /* Handle STREXB. */
12651 record_buf_mem
[0] = 1;
12652 thumb2_insn_r
->mem_rec_count
= 1;
12656 /* Handle STREXH. */
12657 record_buf_mem
[0] = 2 ;
12658 thumb2_insn_r
->mem_rec_count
= 1;
12662 /* Handle STREXD. */
12663 address
= u_regval
[0];
12664 record_buf_mem
[0] = 4;
12665 record_buf_mem
[2] = 4;
12666 record_buf_mem
[3] = address
+ 4;
12667 thumb2_insn_r
->mem_rec_count
= 2;
12672 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12674 if (bit (thumb2_insn_r
->arm_insn
, 24))
12676 if (bit (thumb2_insn_r
->arm_insn
, 23))
12677 offset_addr
= u_regval
[0] + (offset_imm
* 4);
12679 offset_addr
= u_regval
[0] - (offset_imm
* 4);
12681 address
= offset_addr
;
12684 address
= u_regval
[0];
12686 record_buf_mem
[0] = 4;
12687 record_buf_mem
[1] = address
;
12688 record_buf_mem
[2] = 4;
12689 record_buf_mem
[3] = address
+ 4;
12690 thumb2_insn_r
->mem_rec_count
= 2;
12691 record_buf
[0] = reg_rn
;
12692 thumb2_insn_r
->reg_rec_count
= 1;
12696 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12698 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12700 return ARM_RECORD_SUCCESS
;
12703 /* Handler for thumb2 data processing (shift register and modified immediate)
12707 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
12709 uint32_t reg_rd
, op
;
12710 uint32_t record_buf
[8];
12712 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
12713 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12715 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
12717 record_buf
[0] = ARM_PS_REGNUM
;
12718 thumb2_insn_r
->reg_rec_count
= 1;
12722 record_buf
[0] = reg_rd
;
12723 record_buf
[1] = ARM_PS_REGNUM
;
12724 thumb2_insn_r
->reg_rec_count
= 2;
12727 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12729 return ARM_RECORD_SUCCESS
;
12732 /* Generic handler for thumb2 instructions which effect destination and PS
12736 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
12739 uint32_t record_buf
[8];
12741 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12743 record_buf
[0] = reg_rd
;
12744 record_buf
[1] = ARM_PS_REGNUM
;
12745 thumb2_insn_r
->reg_rec_count
= 2;
12747 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12749 return ARM_RECORD_SUCCESS
;
12752 /* Handler for thumb2 branch and miscellaneous control instructions. */
12755 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
12757 uint32_t op
, op1
, op2
;
12758 uint32_t record_buf
[8];
12760 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12761 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
12762 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12764 /* Handle MSR insn. */
12765 if (!(op1
& 0x2) && 0x38 == op
)
12769 /* CPSR is going to be changed. */
12770 record_buf
[0] = ARM_PS_REGNUM
;
12771 thumb2_insn_r
->reg_rec_count
= 1;
12775 arm_record_unsupported_insn(thumb2_insn_r
);
12779 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
12782 record_buf
[0] = ARM_PS_REGNUM
;
12783 record_buf
[1] = ARM_LR_REGNUM
;
12784 thumb2_insn_r
->reg_rec_count
= 2;
12787 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12789 return ARM_RECORD_SUCCESS
;
12792 /* Handler for thumb2 store single data item instructions. */
12795 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
12797 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12799 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
12800 uint32_t address
, offset_addr
;
12801 uint32_t record_buf
[8], record_buf_mem
[8];
12804 ULONGEST u_regval
[2];
12806 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
12807 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
12808 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12809 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12811 if (bit (thumb2_insn_r
->arm_insn
, 23))
12814 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
12815 offset_addr
= u_regval
[0] + offset_imm
;
12816 address
= offset_addr
;
12821 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
12823 /* Handle STRB (register). */
12824 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12825 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
12826 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
12827 offset_addr
= u_regval
[1] << shift_imm
;
12828 address
= u_regval
[0] + offset_addr
;
12832 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12833 if (bit (thumb2_insn_r
->arm_insn
, 10))
12835 if (bit (thumb2_insn_r
->arm_insn
, 9))
12836 offset_addr
= u_regval
[0] + offset_imm
;
12838 offset_addr
= u_regval
[0] - offset_imm
;
12840 address
= offset_addr
;
12843 address
= u_regval
[0];
12849 /* Store byte instructions. */
12852 record_buf_mem
[0] = 1;
12854 /* Store half word instructions. */
12857 record_buf_mem
[0] = 2;
12859 /* Store word instructions. */
12862 record_buf_mem
[0] = 4;
12866 gdb_assert_not_reached ("no decoding pattern found");
12870 record_buf_mem
[1] = address
;
12871 thumb2_insn_r
->mem_rec_count
= 1;
12872 record_buf
[0] = reg_rn
;
12873 thumb2_insn_r
->reg_rec_count
= 1;
12875 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12877 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12879 return ARM_RECORD_SUCCESS
;
12882 /* Handler for thumb2 load memory hints instructions. */
12885 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
12887 uint32_t record_buf
[8];
12888 uint32_t reg_rt
, reg_rn
;
12890 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12891 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12893 if (ARM_PC_REGNUM
!= reg_rt
)
12895 record_buf
[0] = reg_rt
;
12896 record_buf
[1] = reg_rn
;
12897 record_buf
[2] = ARM_PS_REGNUM
;
12898 thumb2_insn_r
->reg_rec_count
= 3;
12900 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12902 return ARM_RECORD_SUCCESS
;
12905 return ARM_RECORD_FAILURE
;
12908 /* Handler for thumb2 load word instructions. */
12911 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
12913 uint32_t record_buf
[8];
12915 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12916 record_buf
[1] = ARM_PS_REGNUM
;
12917 thumb2_insn_r
->reg_rec_count
= 2;
12919 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12921 return ARM_RECORD_SUCCESS
;
12924 /* Handler for thumb2 long multiply, long multiply accumulate, and
12925 divide instructions. */
12928 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
12930 uint32_t opcode1
= 0, opcode2
= 0;
12931 uint32_t record_buf
[8];
12933 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
12934 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12936 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
12938 /* Handle SMULL, UMULL, SMULAL. */
12939 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12940 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12941 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12942 record_buf
[2] = ARM_PS_REGNUM
;
12943 thumb2_insn_r
->reg_rec_count
= 3;
12945 else if (1 == opcode1
|| 3 == opcode2
)
12947 /* Handle SDIV and UDIV. */
12948 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12949 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12950 record_buf
[2] = ARM_PS_REGNUM
;
12951 thumb2_insn_r
->reg_rec_count
= 3;
12954 return ARM_RECORD_FAILURE
;
12956 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12958 return ARM_RECORD_SUCCESS
;
12961 /* Record handler for thumb32 coprocessor instructions. */
12964 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
12966 if (bit (thumb2_insn_r
->arm_insn
, 25))
12967 return arm_record_coproc_data_proc (thumb2_insn_r
);
12969 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
12972 /* Record handler for advance SIMD structure load/store instructions. */
12975 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
12977 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12978 uint32_t l_bit
, a_bit
, b_bits
;
12979 uint32_t record_buf
[128], record_buf_mem
[128];
12980 uint32_t reg_rn
, reg_vd
, address
, f_elem
;
12981 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
12984 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
12985 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
12986 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12987 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12988 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12989 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
12990 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
12991 f_elem
= 8 / f_ebytes
;
12995 ULONGEST u_regval
= 0;
12996 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12997 address
= u_regval
;
13002 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13004 if (b_bits
== 0x07)
13006 else if (b_bits
== 0x0a)
13008 else if (b_bits
== 0x06)
13010 else if (b_bits
== 0x02)
13015 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13017 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13019 record_buf_mem
[index_m
++] = f_ebytes
;
13020 record_buf_mem
[index_m
++] = address
;
13021 address
= address
+ f_ebytes
;
13022 thumb2_insn_r
->mem_rec_count
+= 1;
13027 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13029 if (b_bits
== 0x09 || b_bits
== 0x08)
13031 else if (b_bits
== 0x03)
13036 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13037 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13039 for (loop_t
= 0; loop_t
< 2; loop_t
++)
13041 record_buf_mem
[index_m
++] = f_ebytes
;
13042 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13043 thumb2_insn_r
->mem_rec_count
+= 1;
13045 address
= address
+ (2 * f_ebytes
);
13049 else if ((b_bits
& 0x0e) == 0x04)
13051 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13053 for (loop_t
= 0; loop_t
< 3; loop_t
++)
13055 record_buf_mem
[index_m
++] = f_ebytes
;
13056 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13057 thumb2_insn_r
->mem_rec_count
+= 1;
13059 address
= address
+ (3 * f_ebytes
);
13063 else if (!(b_bits
& 0x0e))
13065 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13067 for (loop_t
= 0; loop_t
< 4; loop_t
++)
13069 record_buf_mem
[index_m
++] = f_ebytes
;
13070 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13071 thumb2_insn_r
->mem_rec_count
+= 1;
13073 address
= address
+ (4 * f_ebytes
);
13079 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
13081 if (bft_size
== 0x00)
13083 else if (bft_size
== 0x01)
13085 else if (bft_size
== 0x02)
13091 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
13092 thumb2_insn_r
->mem_rec_count
= 1;
13094 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
13095 thumb2_insn_r
->mem_rec_count
= 2;
13097 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
13098 thumb2_insn_r
->mem_rec_count
= 3;
13100 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
13101 thumb2_insn_r
->mem_rec_count
= 4;
13103 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
13105 record_buf_mem
[index_m
] = f_ebytes
;
13106 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
13115 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13116 thumb2_insn_r
->reg_rec_count
= 1;
13118 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13119 thumb2_insn_r
->reg_rec_count
= 2;
13121 else if ((b_bits
& 0x0e) == 0x04)
13122 thumb2_insn_r
->reg_rec_count
= 3;
13124 else if (!(b_bits
& 0x0e))
13125 thumb2_insn_r
->reg_rec_count
= 4;
13130 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
13131 thumb2_insn_r
->reg_rec_count
= 1;
13133 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
13134 thumb2_insn_r
->reg_rec_count
= 2;
13136 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
13137 thumb2_insn_r
->reg_rec_count
= 3;
13139 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
13140 thumb2_insn_r
->reg_rec_count
= 4;
13142 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
13143 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
13147 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
13149 record_buf
[index_r
] = reg_rn
;
13150 thumb2_insn_r
->reg_rec_count
+= 1;
13153 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13155 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13160 /* Decodes thumb2 instruction type and invokes its record handler. */
13162 static unsigned int
13163 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
13165 uint32_t op
, op1
, op2
;
13167 op
= bit (thumb2_insn_r
->arm_insn
, 15);
13168 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
13169 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13173 if (!(op2
& 0x64 ))
13175 /* Load/store multiple instruction. */
13176 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
13178 else if ((op2
& 0x64) == 0x4)
13180 /* Load/store (dual/exclusive) and table branch instruction. */
13181 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
13183 else if ((op2
& 0x60) == 0x20)
13185 /* Data-processing (shifted register). */
13186 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13188 else if (op2
& 0x40)
13190 /* Co-processor instructions. */
13191 return thumb2_record_coproc_insn (thumb2_insn_r
);
13194 else if (op1
== 0x02)
13198 /* Branches and miscellaneous control instructions. */
13199 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
13201 else if (op2
& 0x20)
13203 /* Data-processing (plain binary immediate) instruction. */
13204 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13208 /* Data-processing (modified immediate). */
13209 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13212 else if (op1
== 0x03)
13214 if (!(op2
& 0x71 ))
13216 /* Store single data item. */
13217 return thumb2_record_str_single_data (thumb2_insn_r
);
13219 else if (!((op2
& 0x71) ^ 0x10))
13221 /* Advanced SIMD or structure load/store instructions. */
13222 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
13224 else if (!((op2
& 0x67) ^ 0x01))
13226 /* Load byte, memory hints instruction. */
13227 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13229 else if (!((op2
& 0x67) ^ 0x03))
13231 /* Load halfword, memory hints instruction. */
13232 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13234 else if (!((op2
& 0x67) ^ 0x05))
13236 /* Load word instruction. */
13237 return thumb2_record_ld_word (thumb2_insn_r
);
13239 else if (!((op2
& 0x70) ^ 0x20))
13241 /* Data-processing (register) instruction. */
13242 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13244 else if (!((op2
& 0x78) ^ 0x30))
13246 /* Multiply, multiply accumulate, abs diff instruction. */
13247 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13249 else if (!((op2
& 0x78) ^ 0x38))
13251 /* Long multiply, long multiply accumulate, and divide. */
13252 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
13254 else if (op2
& 0x40)
13256 /* Co-processor instructions. */
13257 return thumb2_record_coproc_insn (thumb2_insn_r
);
13265 /* Abstract memory reader. */
13267 class abstract_memory_reader
13270 /* Read LEN bytes of target memory at address MEMADDR, placing the
13271 results in GDB's memory at BUF. Return true on success. */
13273 virtual bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) = 0;
13276 /* Instruction reader from real target. */
13278 class instruction_reader
: public abstract_memory_reader
13281 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
13283 if (target_read_memory (memaddr
, buf
, len
))
13292 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13293 and positive val on failure. */
13296 extract_arm_insn (abstract_memory_reader
& reader
,
13297 insn_decode_record
*insn_record
, uint32_t insn_size
)
13299 gdb_byte buf
[insn_size
];
13301 memset (&buf
[0], 0, insn_size
);
13303 if (!reader
.read (insn_record
->this_addr
, buf
, insn_size
))
13305 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13307 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13311 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13313 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13317 decode_insn (abstract_memory_reader
&reader
, insn_decode_record
*arm_record
,
13318 record_type_t record_type
, uint32_t insn_size
)
13321 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13323 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
13325 arm_record_data_proc_misc_ld_str
, /* 000. */
13326 arm_record_data_proc_imm
, /* 001. */
13327 arm_record_ld_st_imm_offset
, /* 010. */
13328 arm_record_ld_st_reg_offset
, /* 011. */
13329 arm_record_ld_st_multiple
, /* 100. */
13330 arm_record_b_bl
, /* 101. */
13331 arm_record_asimd_vfp_coproc
, /* 110. */
13332 arm_record_coproc_data_proc
/* 111. */
13335 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13337 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13339 thumb_record_shift_add_sub
, /* 000. */
13340 thumb_record_add_sub_cmp_mov
, /* 001. */
13341 thumb_record_ld_st_reg_offset
, /* 010. */
13342 thumb_record_ld_st_imm_offset
, /* 011. */
13343 thumb_record_ld_st_stack
, /* 100. */
13344 thumb_record_misc
, /* 101. */
13345 thumb_record_ldm_stm_swi
, /* 110. */
13346 thumb_record_branch
/* 111. */
13349 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13350 uint32_t insn_id
= 0;
13352 if (extract_arm_insn (reader
, arm_record
, insn_size
))
13356 fprintf_unfiltered (gdb_stdlog
,
13357 _("Process record: error reading memory at "
13358 "addr %s len = %d.\n"),
13359 paddress (arm_record
->gdbarch
,
13360 arm_record
->this_addr
), insn_size
);
13364 else if (ARM_RECORD
== record_type
)
13366 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13367 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13369 if (arm_record
->cond
== 0xf)
13370 ret
= arm_record_extension_space (arm_record
);
13373 /* If this insn has fallen into extension space
13374 then we need not decode it anymore. */
13375 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13377 if (ret
!= ARM_RECORD_SUCCESS
)
13379 arm_record_unsupported_insn (arm_record
);
13383 else if (THUMB_RECORD
== record_type
)
13385 /* As thumb does not have condition codes, we set negative. */
13386 arm_record
->cond
= -1;
13387 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13388 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13389 if (ret
!= ARM_RECORD_SUCCESS
)
13391 arm_record_unsupported_insn (arm_record
);
13395 else if (THUMB2_RECORD
== record_type
)
13397 /* As thumb does not have condition codes, we set negative. */
13398 arm_record
->cond
= -1;
13400 /* Swap first half of 32bit thumb instruction with second half. */
13401 arm_record
->arm_insn
13402 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13404 ret
= thumb2_record_decode_insn_handler (arm_record
);
13406 if (ret
!= ARM_RECORD_SUCCESS
)
13408 arm_record_unsupported_insn (arm_record
);
13414 /* Throw assertion. */
13415 gdb_assert_not_reached ("not a valid instruction, could not decode");
13422 namespace selftests
{
13424 /* Provide both 16-bit and 32-bit thumb instructions. */
13426 class instruction_reader_thumb
: public abstract_memory_reader
13429 template<size_t SIZE
>
13430 instruction_reader_thumb (enum bfd_endian endian
,
13431 const uint16_t (&insns
)[SIZE
])
13432 : m_endian (endian
), m_insns (insns
), m_insns_size (SIZE
)
13435 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
13437 SELF_CHECK (len
== 4 || len
== 2);
13438 SELF_CHECK (memaddr
% 2 == 0);
13439 SELF_CHECK ((memaddr
/ 2) < m_insns_size
);
13441 store_unsigned_integer (buf
, 2, m_endian
, m_insns
[memaddr
/ 2]);
13444 store_unsigned_integer (&buf
[2], 2, m_endian
,
13445 m_insns
[memaddr
/ 2 + 1]);
13451 enum bfd_endian m_endian
;
13452 const uint16_t *m_insns
;
13453 size_t m_insns_size
;
13457 arm_record_test (void)
13459 struct gdbarch_info info
;
13460 info
.bfd_arch_info
= bfd_scan_arch ("arm");
13462 struct gdbarch
*gdbarch
= gdbarch_find_by_info (info
);
13464 SELF_CHECK (gdbarch
!= NULL
);
13466 /* 16-bit Thumb instructions. */
13468 insn_decode_record arm_record
;
13470 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13471 arm_record
.gdbarch
= gdbarch
;
13473 static const uint16_t insns
[] = {
13474 /* db b2 uxtb r3, r3 */
13476 /* cd 58 ldr r5, [r1, r3] */
13480 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13481 instruction_reader_thumb
reader (endian
, insns
);
13482 int ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13483 THUMB_INSN_SIZE_BYTES
);
13485 SELF_CHECK (ret
== 0);
13486 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13487 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13488 SELF_CHECK (arm_record
.arm_regs
[0] == 3);
13490 arm_record
.this_addr
+= 2;
13491 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13492 THUMB_INSN_SIZE_BYTES
);
13494 SELF_CHECK (ret
== 0);
13495 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13496 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13497 SELF_CHECK (arm_record
.arm_regs
[0] == 5);
13500 /* 32-bit Thumb-2 instructions. */
13502 insn_decode_record arm_record
;
13504 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13505 arm_record
.gdbarch
= gdbarch
;
13507 static const uint16_t insns
[] = {
13508 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13512 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13513 instruction_reader_thumb
reader (endian
, insns
);
13514 int ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13515 THUMB2_INSN_SIZE_BYTES
);
13517 SELF_CHECK (ret
== 0);
13518 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13519 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13520 SELF_CHECK (arm_record
.arm_regs
[0] == 7);
13524 /* Instruction reader from manually cooked instruction sequences. */
13526 class test_arm_instruction_reader
: public arm_instruction_reader
13529 explicit test_arm_instruction_reader (gdb::array_view
<const uint32_t> insns
)
13533 uint32_t read (CORE_ADDR memaddr
, enum bfd_endian byte_order
) const override
13535 SELF_CHECK (memaddr
% 4 == 0);
13536 SELF_CHECK (memaddr
/ 4 < m_insns
.size ());
13538 return m_insns
[memaddr
/ 4];
13542 const gdb::array_view
<const uint32_t> m_insns
;
13546 arm_analyze_prologue_test ()
13548 for (bfd_endian endianness
: {BFD_ENDIAN_LITTLE
, BFD_ENDIAN_BIG
})
13550 struct gdbarch_info info
;
13551 info
.byte_order
= endianness
;
13552 info
.byte_order_for_code
= endianness
;
13553 info
.bfd_arch_info
= bfd_scan_arch ("arm");
13555 struct gdbarch
*gdbarch
= gdbarch_find_by_info (info
);
13557 SELF_CHECK (gdbarch
!= NULL
);
13559 /* The "sub" instruction contains an immediate value rotate count of 0,
13560 which resulted in a 32-bit shift of a 32-bit value, caught by
13562 const uint32_t insns
[] = {
13563 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13564 0xe1a05000, /* mov r5, r0 */
13565 0xe5903020, /* ldr r3, [r0, #32] */
13566 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13569 test_arm_instruction_reader
mem_reader (insns
);
13570 arm_prologue_cache cache
;
13571 cache
.saved_regs
= trad_frame_alloc_saved_regs (gdbarch
);
13573 arm_analyze_prologue (gdbarch
, 0, sizeof (insns
) - 1, &cache
, mem_reader
);
13577 } // namespace selftests
13578 #endif /* GDB_SELF_TEST */
13580 /* Cleans up local record registers and memory allocations. */
13583 deallocate_reg_mem (insn_decode_record
*record
)
13585 xfree (record
->arm_regs
);
13586 xfree (record
->arm_mems
);
13590 /* Parse the current instruction and record the values of the registers and
13591 memory that will be changed in current instruction to record_arch_list".
13592 Return -1 if something is wrong. */
13595 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13596 CORE_ADDR insn_addr
)
13599 uint32_t no_of_rec
= 0;
13600 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13601 ULONGEST t_bit
= 0, insn_id
= 0;
13603 ULONGEST u_regval
= 0;
13605 insn_decode_record arm_record
;
13607 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13608 arm_record
.regcache
= regcache
;
13609 arm_record
.this_addr
= insn_addr
;
13610 arm_record
.gdbarch
= gdbarch
;
13613 if (record_debug
> 1)
13615 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13617 paddress (gdbarch
, arm_record
.this_addr
));
13620 instruction_reader reader
;
13621 if (extract_arm_insn (reader
, &arm_record
, 2))
13625 fprintf_unfiltered (gdb_stdlog
,
13626 _("Process record: error reading memory at "
13627 "addr %s len = %d.\n"),
13628 paddress (arm_record
.gdbarch
,
13629 arm_record
.this_addr
), 2);
13634 /* Check the insn, whether it is thumb or arm one. */
13636 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13637 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13640 if (!(u_regval
& t_bit
))
13642 /* We are decoding arm insn. */
13643 ret
= decode_insn (reader
, &arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13647 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13648 /* is it thumb2 insn? */
13649 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13651 ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13652 THUMB2_INSN_SIZE_BYTES
);
13656 /* We are decoding thumb insn. */
13657 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13658 THUMB_INSN_SIZE_BYTES
);
13664 /* Record registers. */
13665 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13666 if (arm_record
.arm_regs
)
13668 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13670 if (record_full_arch_list_add_reg
13671 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13675 /* Record memories. */
13676 if (arm_record
.arm_mems
)
13678 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
13680 if (record_full_arch_list_add_mem
13681 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
13682 arm_record
.arm_mems
[no_of_rec
].len
))
13687 if (record_full_arch_list_add_end ())
13692 deallocate_reg_mem (&arm_record
);
13697 /* See arm-tdep.h. */
13699 const target_desc
*
13700 arm_read_description (arm_fp_type fp_type
)
13702 struct target_desc
*tdesc
= tdesc_arm_list
[fp_type
];
13704 if (tdesc
== nullptr)
13706 tdesc
= arm_create_target_description (fp_type
);
13707 tdesc_arm_list
[fp_type
] = tdesc
;
13713 /* See arm-tdep.h. */
13715 const target_desc
*
13716 arm_read_mprofile_description (arm_m_profile_type m_type
)
13718 struct target_desc
*tdesc
= tdesc_arm_mprofile_list
[m_type
];
13720 if (tdesc
== nullptr)
13722 tdesc
= arm_create_mprofile_target_description (m_type
);
13723 tdesc_arm_mprofile_list
[m_type
] = tdesc
;