1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h>
23
24 #include "frame.h"
25 #include "language.h"
26 #include "inferior.h"
27 #include "infrun.h"
28 #include "gdbcmd.h"
29 #include "gdbcore.h"
30 #include "dis-asm.h"
31 #include "disasm.h"
32 #include "regcache.h"
33 #include "reggroups.h"
34 #include "target-float.h"
35 #include "value.h"
36 #include "arch-utils.h"
37 #include "osabi.h"
38 #include "frame-unwind.h"
39 #include "frame-base.h"
40 #include "trad-frame.h"
41 #include "objfiles.h"
42 #include "dwarf2.h"
43 #include "dwarf2/frame.h"
44 #include "gdbtypes.h"
45 #include "prologue-value.h"
46 #include "remote.h"
47 #include "target-descriptions.h"
48 #include "user-regs.h"
49 #include "observable.h"
50 #include "count-one-bits.h"
51
52 #include "arch/arm.h"
53 #include "arch/arm-get-next-pcs.h"
54 #include "arm-tdep.h"
55 #include "sim/sim-arm.h"
56
57 #include "elf-bfd.h"
58 #include "coff/internal.h"
59 #include "elf/arm.h"
60
61 #include "record.h"
62 #include "record-full.h"
63 #include <algorithm>
64
65 #include "producer.h"
66
67 #if GDB_SELF_TEST
68 #include "gdbsupport/selftest.h"
69 #endif
70
71 static bool arm_debug;
72
73 /* Print an "arm" debug statement. */
74
75 #define arm_debug_printf(fmt, ...) \
76 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
77
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 (msym)->set_target_flag_1 (true)
87
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 (msym)->target_flag_1 ()
90
91 struct arm_mapping_symbol
92 {
93 CORE_ADDR value;
94 char type;
95
96 bool operator< (const arm_mapping_symbol &other) const
97 { return this->value < other.value; }
98 };
99
100 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
101
102 struct arm_per_bfd
103 {
104 explicit arm_per_bfd (size_t num_sections)
105 : section_maps (new arm_mapping_symbol_vec[num_sections]),
106 section_maps_sorted (new bool[num_sections] ())
107 {}
108
109 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
110
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
112
113 The format is an array of vectors of arm_mapping_symbols; there is one
114 vector for each section of the objfile (the array is indexed by BFD section
115 index).
116
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
120
121 /* For each corresponding element of section_maps above, whether that
122 vector has been sorted. */
123 std::unique_ptr<bool[]> section_maps_sorted;
124 };
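
/* As an illustrative sketch (not part of the lookup code proper), once a
   section's vector has been sorted, finding the mapping symbol that covers
   an address is a plain std::lower_bound search, e.g. with hypothetical
   values:

     arm_mapping_symbol_vec map
       = { { 0x0000, 'a' }, { 0x0040, 'd' }, { 0x0080, 't' } };
     arm_mapping_symbol key = { 0x0050, 0 };
     auto it = std::lower_bound (map.begin (), map.end (), key);

   IT points at { 0x0080, 't' }, so the preceding element { 0x0040, 'd' }
   is the mapping symbol covering 0x0050, meaning that address holds data.
   arm_find_mapping_symbol below implements this scheme against real
   objfile sections. */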
125
126 /* Per-bfd data used for mapping symbols. */
127 static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key;
128
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element *setarmcmdlist = NULL;
131 static struct cmd_list_element *showarmcmdlist = NULL;
132
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings[] =
136 {
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
141 "vfp",
142 NULL
143 };
144
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147 static const char *current_fp_model = "auto";
148
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings[] =
151 {
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156 };
157
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160 static const char *arm_abi_string = "auto";
161
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings[] =
164 {
165 "auto",
166 "arm",
167 "thumb",
168 NULL
169 };
170
171 static const char *arm_fallback_mode_string = "auto";
172 static const char *arm_force_mode_string = "auto";
173
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added to this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
178 static const struct
179 {
180 const char *name;
181 int regnum;
182 } arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
218 { "lr", 14 },
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223 };
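
/* These aliases are registered as user registers when the architecture is
   set up, so that, for example, "print $a1" and "print $r0" read the same
   register, as do "$v1" and "$r4"; only the name used differs. */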
224
225 static const char *const arm_register_names[] =
226 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
233
234 /* Holds the current set of options to be passed to the disassembler. */
235 static char *arm_disassembler_options;
236
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles;
239
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style;
242
243 /* All possible arm target descriptors. */
244 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
245 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
246
247 /* This is used to keep the bfd arch_info in sync with the disassembly
248 style. */
249 static void set_disassembly_style_sfunc (const char *, int,
250 struct cmd_list_element *);
251 static void show_disassembly_style_sfunc (struct ui_file *, int,
252 struct cmd_list_element *,
253 const char *);
254
255 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
256 readable_regcache *regcache,
257 int regnum, gdb_byte *buf);
258 static void arm_neon_quad_write (struct gdbarch *gdbarch,
259 struct regcache *regcache,
260 int regnum, const gdb_byte *buf);
261
262 static CORE_ADDR
263 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
264
265
266 /* get_next_pcs operations. */
267 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
268 arm_get_next_pcs_read_memory_unsigned_integer,
269 arm_get_next_pcs_syscall_next_pc,
270 arm_get_next_pcs_addr_bits_remove,
271 arm_get_next_pcs_is_thumb,
272 NULL,
273 };
274
275 struct arm_prologue_cache
276 {
277 /* The stack pointer at the time this frame was created; i.e. the
278 caller's stack pointer when this function was called. It is used
279 to identify this frame. */
280 CORE_ADDR sp;
281
282 /* Additional stack pointers used by M-profile with Security extension. */
283 /* Use msp_s / psp_s to hold the values of msp / psp when there is
284 no Security extension. */
285 CORE_ADDR msp_s;
286 CORE_ADDR msp_ns;
287 CORE_ADDR psp_s;
288 CORE_ADDR psp_ns;
289
290 /* Active stack pointer. */
291 int active_sp_regnum;
292 int active_msp_regnum;
293 int active_psp_regnum;
294
295 /* The frame base for this frame is just prev_sp - frame size.
296 FRAMESIZE is the distance from the frame pointer to the
297 initial stack pointer. */
298
299 int framesize;
300
301 /* The register used to hold the frame pointer for this frame. */
302 int framereg;
303
304 /* True if the return address is signed, false otherwise. */
305 gdb::optional<bool> ra_signed_state;
306
307 /* Saved register offsets. */
308 trad_frame_saved_reg *saved_regs;
309
310 arm_prologue_cache() = default;
311 };
312
313
314 /* Reconstruct T bit in program status register from LR value. */
315
316 static inline ULONGEST
317 reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
318 {
319 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
320 if (IS_THUMB_ADDR (lr))
321 psr |= t_bit;
322 else
323 psr &= ~t_bit;
324
325 return psr;
326 }
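
/* A brief worked example, with hypothetical values:

     lr = 0x00008001  ->  IS_THUMB_ADDR (lr) is true, so the T bit is set
     lr = 0x00008000  ->  IS_THUMB_ADDR (lr) is false, so the T bit is cleared

   i.e. the low bit of the saved return address tells us whether the caller
   was executing Thumb code, and that is folded back into the unwound PSR. */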
327
328 /* Initialize CACHE fields for which zero is not adequate (CACHE is
329 expected to have been ZALLOC'ed before calling this function). */
330
331 static void
332 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
333 {
334 cache->active_sp_regnum = ARM_SP_REGNUM;
335
336 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
337 }
338
339 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
340
341 static void
342 arm_cache_init (struct arm_prologue_cache *cache, frame_info_ptr frame)
343 {
344 struct gdbarch *gdbarch = get_frame_arch (frame);
345 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
346
347 arm_cache_init (cache, gdbarch);
348 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
349
350 if (tdep->have_sec_ext)
351 {
352 const CORE_ADDR msp_val
353 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
354 const CORE_ADDR psp_val
355 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
356
357 cache->msp_s
358 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum);
359 cache->msp_ns
360 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum);
361 cache->psp_s
362 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum);
363 cache->psp_ns
364 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum);
365
366 /* Identify what msp is an alias for (msp_s or msp_ns). */
367 if (msp_val == cache->msp_s)
368 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
369 else if (msp_val == cache->msp_ns)
370 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
371 else
372 {
373 warning (_("Invalid state, unable to determine msp alias, assuming "
374 "msp_s."));
375 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
376 }
377
378 /* Identify what psp is an alias for (psp_s or psp_ns). */
379 if (psp_val == cache->psp_s)
380 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
381 else if (psp_val == cache->psp_ns)
382 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
383 else
384 {
385 warning (_("Invalid state, unable to determine psp alias, assuming "
386 "psp_s."));
387 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
388 }
389
390 /* Identify what sp is an alias for (msp_s, msp_ns, psp_s or psp_ns). */
391 if (msp_val == cache->sp)
392 cache->active_sp_regnum = cache->active_msp_regnum;
393 else if (psp_val == cache->sp)
394 cache->active_sp_regnum = cache->active_psp_regnum;
395 else
396 {
397 warning (_("Invalid state, unable to determine sp alias, assuming "
398 "msp."));
399 cache->active_sp_regnum = cache->active_msp_regnum;
400 }
401 }
402 else if (tdep->is_m)
403 {
404 cache->msp_s
405 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
406 cache->psp_s
407 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
408
409 /* Identify what sp is an alias for (msp or psp). */
410 if (cache->msp_s == cache->sp)
411 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
412 else if (cache->psp_s == cache->sp)
413 cache->active_sp_regnum = tdep->m_profile_psp_regnum;
414 else
415 {
416 warning (_("Invalid state, unable to determine sp alias, assuming "
417 "msp."));
418 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
419 }
420 }
421 else
422 {
423 cache->msp_s
424 = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
425
426 cache->active_sp_regnum = ARM_SP_REGNUM;
427 }
428 }
429
430 /* Return the requested stack pointer value (in REGNUM), taking into
431 account whether we have a Security extension or an M-profile
432 CPU. */
433
434 static CORE_ADDR
435 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
436 arm_gdbarch_tdep *tdep, int regnum)
437 {
438 if (tdep->have_sec_ext)
439 {
440 if (regnum == tdep->m_profile_msp_s_regnum)
441 return cache->msp_s;
442 if (regnum == tdep->m_profile_msp_ns_regnum)
443 return cache->msp_ns;
444 if (regnum == tdep->m_profile_psp_s_regnum)
445 return cache->psp_s;
446 if (regnum == tdep->m_profile_psp_ns_regnum)
447 return cache->psp_ns;
448 if (regnum == tdep->m_profile_msp_regnum)
449 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
450 if (regnum == tdep->m_profile_psp_regnum)
451 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
452 if (regnum == ARM_SP_REGNUM)
453 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
454 }
455 else if (tdep->is_m)
456 {
457 if (regnum == tdep->m_profile_msp_regnum)
458 return cache->msp_s;
459 if (regnum == tdep->m_profile_psp_regnum)
460 return cache->psp_s;
461 if (regnum == ARM_SP_REGNUM)
462 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
463 }
464 else if (regnum == ARM_SP_REGNUM)
465 return cache->sp;
466
467 gdb_assert_not_reached ("Invalid SP selection");
468 }
469
470 /* Return the previous stack address, depending on which SP register
471 is active. */
472
473 static CORE_ADDR
474 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
475 {
476 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
477 return val;
478 }
479
480 /* Set the active stack pointer to VAL. */
481
482 static void
483 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
484 arm_gdbarch_tdep *tdep, CORE_ADDR val)
485 {
486 if (tdep->have_sec_ext)
487 {
488 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
489 cache->msp_s = val;
490 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
491 cache->msp_ns = val;
492 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
493 cache->psp_s = val;
494 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
495 cache->psp_ns = val;
496
497 return;
498 }
499 else if (tdep->is_m)
500 {
501 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
502 cache->msp_s = val;
503 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
504 cache->psp_s = val;
505
506 return;
507 }
508 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
509 {
510 cache->sp = val;
511 return;
512 }
513
514 gdb_assert_not_reached ("Invalid SP selection");
515 }
516
517 /* Return true if REGNUM is one of the alternative stack pointers. */
518
519 static bool
520 arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum)
521 {
522 if ((regnum == tdep->m_profile_msp_regnum)
523 || (regnum == tdep->m_profile_msp_s_regnum)
524 || (regnum == tdep->m_profile_msp_ns_regnum)
525 || (regnum == tdep->m_profile_psp_regnum)
526 || (regnum == tdep->m_profile_psp_s_regnum)
527 || (regnum == tdep->m_profile_psp_ns_regnum))
528 return true;
529 else
530 return false;
531 }
532
533 /* Set the active stack pointer to SP_REGNUM. */
534
535 static void
536 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
537 arm_gdbarch_tdep *tdep, int sp_regnum)
538 {
539 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum));
540
541 if (tdep->have_sec_ext)
542 {
543 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
544 && sp_regnum != tdep->m_profile_psp_regnum);
545
546 if (sp_regnum == tdep->m_profile_msp_s_regnum
547 || sp_regnum == tdep->m_profile_psp_s_regnum)
548 {
549 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
550 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
551 }
552 else if (sp_regnum == tdep->m_profile_msp_ns_regnum
553 || sp_regnum == tdep->m_profile_psp_ns_regnum)
554 {
555 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
556 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
557 }
558 }
559
560 cache->active_sp_regnum = sp_regnum;
561 }
562
563 namespace {
564
565 /* Abstract class to read ARM instructions from memory. */
566
567 class arm_instruction_reader
568 {
569 public:
570 /* Read a 4-byte instruction from memory using BYTE_ORDER endianness. */
571 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
572 };
573
574 /* Read instructions from target memory. */
575
576 class target_arm_instruction_reader : public arm_instruction_reader
577 {
578 public:
579 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
580 {
581 return read_code_unsigned_integer (memaddr, 4, byte_order);
582 }
583 };
584
585 } /* namespace */
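
/* A minimal sketch, for illustration only, of an alternative reader backed by
   a local buffer instead of target memory (BUF and BASE are hypothetical
   members holding the bytes and the address they were read from):

     class buffer_arm_instruction_reader : public arm_instruction_reader
     {
     public:
       uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
       {
         return extract_unsigned_integer (buf + (memaddr - base), 4,
                                          byte_order);
       }
     };

   Such a reader makes it possible to run arm_analyze_prologue over canned
   instruction sequences, e.g. from unit tests, without a live target. */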
586
587 static CORE_ADDR arm_analyze_prologue
588 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
589 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
590
591 /* Architecture version for displaced stepping. This affects the behaviour of
592 certain instructions, and really should not be hard-wired. */
593
594 #define DISPLACED_STEPPING_ARCH_VERSION 5
595
596 /* See arm-tdep.h. */
597
598 bool arm_apcs_32 = true;
599 bool arm_unwind_secure_frames = true;
600
601 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
602
603 int
604 arm_psr_thumb_bit (struct gdbarch *gdbarch)
605 {
606 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
607
608 if (tdep->is_m)
609 return XPSR_T;
610 else
611 return CPSR_T;
612 }
613
614 /* Determine if the processor is currently executing in Thumb mode. */
615
616 int
617 arm_is_thumb (struct regcache *regcache)
618 {
619 ULONGEST cpsr;
620 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
621
622 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
623
624 return (cpsr & t_bit) != 0;
625 }
626
627 /* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM
628 frame. */
629
630 int
631 arm_frame_is_thumb (frame_info_ptr frame)
632 {
633 /* Check the architecture of FRAME. */
634 struct gdbarch *gdbarch = get_frame_arch (frame);
635 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm);
636
637 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
638 directly (from a signal frame or dummy frame) or by interpreting
639 the saved LR (from a prologue or DWARF frame). So consult it and
640 trust the unwinders. */
641 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
642
643 /* Find and extract the thumb bit. */
644 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
645 return (cpsr & t_bit) != 0;
646 }
647
648 /* Search for the mapping symbol covering MEMADDR. If one is found,
649 return its type. Otherwise, return 0. If START is non-NULL,
650 set *START to the location of the mapping symbol. */
651
652 static char
653 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
654 {
655 struct obj_section *sec;
656
657 /* If there are mapping symbols, consult them. */
658 sec = find_pc_section (memaddr);
659 if (sec != NULL)
660 {
661 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ());
662 if (data != NULL)
663 {
664 unsigned int section_idx = sec->the_bfd_section->index;
665 arm_mapping_symbol_vec &map
666 = data->section_maps[section_idx];
667
668 /* Sort the vector on first use. */
669 if (!data->section_maps_sorted[section_idx])
670 {
671 std::sort (map.begin (), map.end ());
672 data->section_maps_sorted[section_idx] = true;
673 }
674
675 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
676 arm_mapping_symbol_vec::const_iterator it
677 = std::lower_bound (map.begin (), map.end (), map_key);
678
679 /* std::lower_bound finds the earliest ordered insertion
680 point. If the symbol at this position starts at this exact
681 address, we use that; otherwise, the preceding
682 mapping symbol covers this address. */
683 if (it < map.end ())
684 {
685 if (it->value == map_key.value)
686 {
687 if (start)
688 *start = it->value + sec->addr ();
689 return it->type;
690 }
691 }
692
693 if (it > map.begin ())
694 {
695 arm_mapping_symbol_vec::const_iterator prev_it
696 = it - 1;
697
698 if (start)
699 *start = prev_it->value + sec->addr ();
700 return prev_it->type;
701 }
702 }
703 }
704
705 return 0;
706 }
707
708 /* Determine if the program counter specified in MEMADDR is in a Thumb
709 function. This function should be called for addresses unrelated to
710 any executing frame; otherwise, prefer arm_frame_is_thumb. */
711
712 int
713 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
714 {
715 struct bound_minimal_symbol sym;
716 char type;
717 arm_displaced_step_copy_insn_closure *dsc = nullptr;
718 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
719
720 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
721 dsc = ((arm_displaced_step_copy_insn_closure * )
722 gdbarch_displaced_step_copy_insn_closure_by_addr
723 (gdbarch, current_inferior (), memaddr));
724
725 /* If we are checking the mode of a displaced instruction in the copy area,
726 the mode should be determined by the instruction at the original address. */
727 if (dsc)
728 {
729 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
730 (unsigned long) dsc->insn_addr,
731 (unsigned long) memaddr);
732 memaddr = dsc->insn_addr;
733 }
734
735 /* If bit 0 of the address is set, assume this is a Thumb address. */
736 if (IS_THUMB_ADDR (memaddr))
737 return 1;
738
739 /* If the user wants to override the symbol table, let them. */
740 if (strcmp (arm_force_mode_string, "arm") == 0)
741 return 0;
742 if (strcmp (arm_force_mode_string, "thumb") == 0)
743 return 1;
744
745 /* ARM v6-M and v7-M are always in Thumb mode. */
746 if (tdep->is_m)
747 return 1;
748
749 /* If there are mapping symbols, consult them. */
750 type = arm_find_mapping_symbol (memaddr, NULL);
751 if (type)
752 return type == 't';
753
754 /* Thumb functions have a "special" bit set in minimal symbols. */
755 sym = lookup_minimal_symbol_by_pc (memaddr);
756 if (sym.minsym)
757 return (MSYMBOL_IS_SPECIAL (sym.minsym));
758
759 /* If the user wants to override the fallback mode, let them. */
760 if (strcmp (arm_fallback_mode_string, "arm") == 0)
761 return 0;
762 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
763 return 1;
764
765 /* If we couldn't find any symbol, but we're talking to a running
766 target, then trust the current value of $cpsr. This lets
767 "display/i $pc" always show the correct mode (though if there is
768 a symbol table we will not reach here, so it still may not be
769 displayed in the mode it will be executed in). */
770 if (target_has_registers ())
771 return arm_frame_is_thumb (get_current_frame ());
772
773 /* Otherwise we're out of luck; we assume ARM. */
774 return 0;
775 }
776
777 static inline bool
778 arm_m_addr_is_lockup (CORE_ADDR addr)
779 {
780 switch (addr)
781 {
782 /* Values for lockup state.
783 For more details see "B1.5.15 Unrecoverable exception cases" in
784 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or
785 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */
786 case 0xeffffffe:
787 case 0xfffffffe:
788 case 0xffffffff:
789 return true;
790
791 default:
792 /* Address is not lockup. */
793 return false;
794 }
795 }
796
797 /* Determine if the address specified equals any of these magic return
798 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
799 architectures. Lockup magic PC values are also included.
800 Check also for FNC_RETURN if we have the v8-M Security Extension.
801
802 From ARMv6-M Reference Manual B1.5.8
803 Table B1-5 Exception return behavior
804
805 EXC_RETURN Return To Return Stack
806 0xFFFFFFF1 Handler mode Main
807 0xFFFFFFF9 Thread mode Main
808 0xFFFFFFFD Thread mode Process
809
810 From ARMv7-M Reference Manual B1.5.8
811 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
812
813 EXC_RETURN Return To Return Stack
814 0xFFFFFFF1 Handler mode Main
815 0xFFFFFFF9 Thread mode Main
816 0xFFFFFFFD Thread mode Process
817
818 Table B1-9 EXC_RETURN definition of exception return behavior, with
819 FP
820
821 EXC_RETURN Return To Return Stack Frame Type
822 0xFFFFFFE1 Handler mode Main Extended
823 0xFFFFFFE9 Thread mode Main Extended
824 0xFFFFFFED Thread mode Process Extended
825 0xFFFFFFF1 Handler mode Main Basic
826 0xFFFFFFF9 Thread mode Main Basic
827 0xFFFFFFFD Thread mode Process Basic
828
829 For more details see "B1.5.8 Exception return behavior"
830 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
831
832 From ARMv8-M Architecture Technical Reference, D1.2.95
833 FType, Mode and SPSEL bits are to be considered when the Security
834 Extension is not implemented.
835
836 EXC_RETURN Return To Return Stack Frame Type
837 0xFFFFFFA0 Handler mode Main Extended
838 0xFFFFFFA8 Thread mode Main Extended
839 0xFFFFFFAC Thread mode Process Extended
840 0xFFFFFFB0 Handler mode Main Standard
841 0xFFFFFFB8 Thread mode Main Standard
842 0xFFFFFFBC Thread mode Process Standard */
843
844 static int
845 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
846 {
847 if (arm_m_addr_is_lockup (addr))
848 return 1;
849
850 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
851 if (tdep->have_sec_ext)
852 {
853 switch ((addr & 0xff000000))
854 {
855 case 0xff000000: /* EXC_RETURN pattern. */
856 case 0xfe000000: /* FNC_RETURN pattern. */
857 return 1;
858 default:
859 return 0;
860 }
861 }
862 else
863 {
864 switch (addr)
865 {
866 /* Values from ARMv8-M Architecture Technical Reference. */
867 case 0xffffffa0:
868 case 0xffffffa8:
869 case 0xffffffac:
870 case 0xffffffb0:
871 case 0xffffffb8:
872 case 0xffffffbc:
873 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
874 the exception return behavior. */
875 case 0xffffffe1:
876 case 0xffffffe9:
877 case 0xffffffed:
878 case 0xfffffff1:
879 case 0xfffffff9:
880 case 0xfffffffd:
881 /* Address is magic. */
882 return 1;
883
884 default:
885 /* Address is not magic. */
886 return 0;
887 }
888 }
889 }
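
/* For example, without the Security Extension a return address of 0xfffffffd
   is recognized as EXC_RETURN (return to Thread mode using the Process
   stack), while with the Security Extension any value in the 0xffxxxxxx
   (EXC_RETURN) or 0xfexxxxxx (FNC_RETURN) ranges is treated as magic. An
   ordinary code address such as 0x00008000 is not. */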
890
891 /* Remove useless bits from addresses in a running program. */
892 static CORE_ADDR
893 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
894 {
895 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
896
897 /* On M-profile devices, do not strip the low bit from EXC_RETURN
898 (the magic exception return address). */
899 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
900 return val;
901
902 if (arm_apcs_32)
903 return UNMAKE_THUMB_ADDR (val);
904 else
905 return (val & 0x03fffffc);
906 }
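
/* Two brief examples, assuming a 32-bit APCS target: a Thumb return address
   of 0x00008001 is cleaned up to 0x00008000, since the low bit only encodes
   the Thumb state, whereas on an M-profile target the EXC_RETURN value
   0xfffffffd is returned unchanged so the unwinder can still recognize it. */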
907
908 /* Return 1 if PC is the start of a compiler helper function which
909 can be safely ignored during prologue skipping. IS_THUMB is true
910 if the function is known to be a Thumb function due to the way it
911 is being called. */
912 static int
913 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
914 {
915 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
916 struct bound_minimal_symbol msym;
917
918 msym = lookup_minimal_symbol_by_pc (pc);
919 if (msym.minsym != NULL
920 && msym.value_address () == pc
921 && msym.minsym->linkage_name () != NULL)
922 {
923 const char *name = msym.minsym->linkage_name ();
924
925 /* The GNU linker's Thumb call stub to foo is named
926 __foo_from_thumb. */
927 if (strstr (name, "_from_thumb") != NULL)
928 name += 2;
929
930 /* On soft-float targets, __truncdfsf2 is called to convert promoted
931 arguments to their argument types in non-prototyped
932 functions. */
933 if (startswith (name, "__truncdfsf2"))
934 return 1;
935 if (startswith (name, "__aeabi_d2f"))
936 return 1;
937
938 /* Internal functions related to thread-local storage. */
939 if (startswith (name, "__tls_get_addr"))
940 return 1;
941 if (startswith (name, "__aeabi_read_tp"))
942 return 1;
943 }
944 else
945 {
946 /* If we run against a stripped glibc, we may be unable to identify
947 special functions by name. Check for one important case,
948 __aeabi_read_tp, by comparing the *code* against the default
949 implementation (this is hand-written ARM assembler in glibc). */
950
951 if (!is_thumb
952 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
953 == 0xe3e00a0f /* mov r0, #0xffff0fff */
954 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
955 == 0xe240f01f) /* sub pc, r0, #31 */
956 return 1;
957 }
958
959 return 0;
960 }
961
962 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
963 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
964 the instruction. */
965 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
966 ((bits ((insn1), 0, 3) << 12) \
967 | (bits ((insn1), 10, 10) << 11) \
968 | (bits ((insn2), 12, 14) << 8) \
969 | bits ((insn2), 0, 7))
970
971 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
972 the 32-bit instruction. */
973 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
974 ((bits ((insn), 16, 19) << 12) \
975 | bits ((insn), 0, 11))
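
/* A brief worked example, for illustration: the Thumb-2 instruction
   "movw r3, #0x1234" is encoded as INSN1 = 0xf241, INSN2 = 0x2334, and
   EXTRACT_MOVW_MOVT_IMM_T reassembles the scattered fields (imm4:i:imm3:imm8)
   into 0x1234. Likewise, the ARM instruction "movw r3, #0x1234" is encoded
   as 0xe3013234, and EXTRACT_MOVW_MOVT_IMM_A recombines imm4:imm12 into
   0x1234. */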
976
977 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
978
979 static unsigned int
980 thumb_expand_immediate (unsigned int imm)
981 {
982 unsigned int count = imm >> 7;
983
984 if (count < 8)
985 switch (count / 2)
986 {
987 case 0:
988 return imm & 0xff;
989 case 1:
990 return (imm & 0xff) | ((imm & 0xff) << 16);
991 case 2:
992 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
993 case 3:
994 return (imm & 0xff) | ((imm & 0xff) << 8)
995 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
996 }
997
998 return (0x80 | (imm & 0x7f)) << (32 - count);
999 }
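
/* A few brief worked examples: an encoded immediate of 0x0ff expands to
   0x000000ff (an unmodified byte), 0x1ff expands to 0x00ff00ff (the byte
   replicated in both halfwords), and 0x4ff takes the rotated-constant path,
   rotating (0x80 | 0x7f) into bits 23..30, i.e. 0x7f800000. */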
1000
1001 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
1002 epilogue, 0 otherwise. */
1003
1004 static int
1005 thumb_instruction_restores_sp (unsigned short insn)
1006 {
1007 return (insn == 0x46bd /* mov sp, r7 */
1008 || (insn & 0xff80) == 0xb000 /* add sp, imm */
1009 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
1010 }
1011
1012 /* Analyze a Thumb prologue, looking for a recognizable stack frame
1013 and frame pointer. Scan until we encounter a store that could
1014 clobber the stack frame unexpectedly, or an unknown instruction.
1015 Return the last address which is definitely safe to skip for an
1016 initial breakpoint. */
1017
1018 static CORE_ADDR
1019 thumb_analyze_prologue (struct gdbarch *gdbarch,
1020 CORE_ADDR start, CORE_ADDR limit,
1021 struct arm_prologue_cache *cache)
1022 {
1023 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1024 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1025 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1026 int i;
1027 pv_t regs[16];
1028 CORE_ADDR offset;
1029 CORE_ADDR unrecognized_pc = 0;
1030
1031 for (i = 0; i < 16; i++)
1032 regs[i] = pv_register (i, 0);
1033 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1034
1035 while (start < limit)
1036 {
1037 unsigned short insn;
1038 gdb::optional<bool> ra_signed_state;
1039
1040 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
1041
1042 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
1043 {
1044 int regno;
1045 int mask;
1046
1047 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1048 break;
1049
1050 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
1051 whether to save LR (R14). */
1052 mask = (insn & 0xff) | ((insn & 0x100) << 6);
1053
1054 /* Calculate offsets of saved R0-R7 and LR. */
1055 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1056 if (mask & (1 << regno))
1057 {
1058 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1059 -4);
1060 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1061 }
1062 }
1063 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
1064 {
1065 offset = (insn & 0x7f) << 2; /* get scaled offset */
1066 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1067 -offset);
1068 }
1069 else if (thumb_instruction_restores_sp (insn))
1070 {
1071 /* Don't scan past the epilogue. */
1072 break;
1073 }
1074 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
1075 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
1076 (insn & 0xff) << 2);
1077 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
1078 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1079 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
1080 bits (insn, 6, 8));
1081 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1082 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1083 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1084 bits (insn, 0, 7));
1085 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1086 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1087 && pv_is_constant (regs[bits (insn, 3, 5)]))
1088 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1089 regs[bits (insn, 6, 8)]);
1090 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1091 && pv_is_constant (regs[bits (insn, 3, 6)]))
1092 {
1093 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1094 int rm = bits (insn, 3, 6);
1095 regs[rd] = pv_add (regs[rd], regs[rm]);
1096 }
1097 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1098 {
1099 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1100 int src_reg = (insn & 0x78) >> 3;
1101 regs[dst_reg] = regs[src_reg];
1102 }
1103 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1104 {
1105 /* Handle stores to the stack. Normally pushes are used,
1106 but with GCC -mtpcs-frame, there may be other stores
1107 in the prologue to create the frame. */
1108 int regno = (insn >> 8) & 0x7;
1109 pv_t addr;
1110
1111 offset = (insn & 0xff) << 2;
1112 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1113
1114 if (stack.store_would_trash (addr))
1115 break;
1116
1117 stack.store (addr, 4, regs[regno]);
1118 }
1119 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1120 {
1121 int rd = bits (insn, 0, 2);
1122 int rn = bits (insn, 3, 5);
1123 pv_t addr;
1124
1125 offset = bits (insn, 6, 10) << 2;
1126 addr = pv_add_constant (regs[rn], offset);
1127
1128 if (stack.store_would_trash (addr))
1129 break;
1130
1131 stack.store (addr, 4, regs[rd]);
1132 }
1133 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1134 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1135 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1136 /* Ignore stores of argument registers to the stack. */
1137 ;
1138 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1139 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1140 /* Ignore block loads from the stack, potentially copying
1141 parameters from memory. */
1142 ;
1143 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1144 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1145 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1146 /* Similarly ignore single loads from the stack. */
1147 ;
1148 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1149 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1150 /* Skip register copies, i.e. saves to another register
1151 instead of the stack. */
1152 ;
1153 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1154 /* Recognize constant loads; even with small stacks these are necessary
1155 on Thumb. */
1156 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1157 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1158 {
1159 /* Constant pool loads, for the same reason. */
1160 unsigned int constant;
1161 CORE_ADDR loc;
1162
1163 loc = start + 4 + bits (insn, 0, 7) * 4;
1164 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1165 regs[bits (insn, 8, 10)] = pv_constant (constant);
1166 }
1167 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1168 {
1169 unsigned short inst2;
1170
1171 inst2 = read_code_unsigned_integer (start + 2, 2,
1172 byte_order_for_code);
1173 uint32_t whole_insn = (insn << 16) | inst2;
1174
1175 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1176 {
1177 /* BL, BLX. Allow some special function calls when
1178 skipping the prologue; GCC generates these before
1179 storing arguments to the stack. */
1180 CORE_ADDR nextpc;
1181 int j1, j2, imm1, imm2;
1182
1183 imm1 = sbits (insn, 0, 10);
1184 imm2 = bits (inst2, 0, 10);
1185 j1 = bit (inst2, 13);
1186 j2 = bit (inst2, 11);
1187
1188 offset = ((imm1 << 12) + (imm2 << 1));
1189 offset ^= ((!j2) << 22) | ((!j1) << 23);
1190
1191 nextpc = start + 4 + offset;
1192 /* For BLX make sure to clear the low bits. */
1193 if (bit (inst2, 12) == 0)
1194 nextpc = nextpc & 0xfffffffc;
1195
1196 if (!skip_prologue_function (gdbarch, nextpc,
1197 bit (inst2, 12) != 0))
1198 break;
1199 }
1200
1201 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1202 { registers } */
1203 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1204 {
1205 pv_t addr = regs[bits (insn, 0, 3)];
1206 int regno;
1207
1208 if (stack.store_would_trash (addr))
1209 break;
1210
1211 /* Calculate offsets of saved registers. */
1212 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1213 if (inst2 & (1 << regno))
1214 {
1215 addr = pv_add_constant (addr, -4);
1216 stack.store (addr, 4, regs[regno]);
1217 }
1218
1219 if (insn & 0x0020)
1220 regs[bits (insn, 0, 3)] = addr;
1221 }
1222
1223 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1224 else if ((insn & 0xff20) == 0xed20
1225 && (inst2 & 0x0f00) == 0x0b00
1226 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1227 {
1228 /* Address SP points to. */
1229 pv_t addr = regs[bits (insn, 0, 3)];
1230
1231 /* Number of registers saved. */
1232 unsigned int number = bits (inst2, 0, 7) >> 1;
1233
1234 /* First register to save. */
1235 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1236
1237 if (stack.store_would_trash (addr))
1238 break;
1239
1240 /* Calculate offsets of saved registers. */
1241 for (; number > 0; number--)
1242 {
1243 addr = pv_add_constant (addr, -8);
1244 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1245 + vd + number, 0));
1246 }
1247
1248 /* Writeback SP to account for the saved registers. */
1249 regs[bits (insn, 0, 3)] = addr;
1250 }
1251
1252 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1253 [Rn, #+/-imm]{!} */
1254 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1255 {
1256 int regno1 = bits (inst2, 12, 15);
1257 int regno2 = bits (inst2, 8, 11);
1258 pv_t addr = regs[bits (insn, 0, 3)];
1259
1260 offset = inst2 & 0xff;
1261 if (insn & 0x0080)
1262 addr = pv_add_constant (addr, offset);
1263 else
1264 addr = pv_add_constant (addr, -offset);
1265
1266 if (stack.store_would_trash (addr))
1267 break;
1268
1269 stack.store (addr, 4, regs[regno1]);
1270 stack.store (pv_add_constant (addr, 4),
1271 4, regs[regno2]);
1272
1273 if (insn & 0x0020)
1274 regs[bits (insn, 0, 3)] = addr;
1275 }
1276
1277 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1278 && (inst2 & 0x0c00) == 0x0c00
1279 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1280 {
1281 int regno = bits (inst2, 12, 15);
1282 pv_t addr = regs[bits (insn, 0, 3)];
1283
1284 offset = inst2 & 0xff;
1285 if (inst2 & 0x0200)
1286 addr = pv_add_constant (addr, offset);
1287 else
1288 addr = pv_add_constant (addr, -offset);
1289
1290 if (stack.store_would_trash (addr))
1291 break;
1292
1293 stack.store (addr, 4, regs[regno]);
1294
1295 if (inst2 & 0x0100)
1296 regs[bits (insn, 0, 3)] = addr;
1297 }
1298
1299 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1300 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1301 {
1302 int regno = bits (inst2, 12, 15);
1303 pv_t addr;
1304
1305 offset = inst2 & 0xfff;
1306 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1307
1308 if (stack.store_would_trash (addr))
1309 break;
1310
1311 stack.store (addr, 4, regs[regno]);
1312 }
1313
1314 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1315 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1316 /* Ignore stores of argument registers to the stack. */
1317 ;
1318
1319 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1320 && (inst2 & 0x0d00) == 0x0c00
1321 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1322 /* Ignore stores of argument registers to the stack. */
1323 ;
1324
1325 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1326 { registers } */
1327 && (inst2 & 0x8000) == 0x0000
1328 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1329 /* Ignore block loads from the stack, potentially copying
1330 parameters from memory. */
1331 ;
1332
1333 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1334 [Rn, #+/-imm] */
1335 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1336 /* Similarly ignore dual loads from the stack. */
1337 ;
1338
1339 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1340 && (inst2 & 0x0d00) == 0x0c00
1341 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1342 /* Similarly ignore single loads from the stack. */
1343 ;
1344
1345 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1346 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1347 /* Similarly ignore single loads from the stack. */
1348 ;
1349
1350 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1351 && (inst2 & 0x8000) == 0x0000)
1352 {
1353 unsigned int imm = ((bits (insn, 10, 10) << 11)
1354 | (bits (inst2, 12, 14) << 8)
1355 | bits (inst2, 0, 7));
1356
1357 regs[bits (inst2, 8, 11)]
1358 = pv_add_constant (regs[bits (insn, 0, 3)],
1359 thumb_expand_immediate (imm));
1360 }
1361
1362 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1363 && (inst2 & 0x8000) == 0x0000)
1364 {
1365 unsigned int imm = ((bits (insn, 10, 10) << 11)
1366 | (bits (inst2, 12, 14) << 8)
1367 | bits (inst2, 0, 7));
1368
1369 regs[bits (inst2, 8, 11)]
1370 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1371 }
1372
1373 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1374 && (inst2 & 0x8000) == 0x0000)
1375 {
1376 unsigned int imm = ((bits (insn, 10, 10) << 11)
1377 | (bits (inst2, 12, 14) << 8)
1378 | bits (inst2, 0, 7));
1379
1380 regs[bits (inst2, 8, 11)]
1381 = pv_add_constant (regs[bits (insn, 0, 3)],
1382 - (CORE_ADDR) thumb_expand_immediate (imm));
1383 }
1384
1385 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1386 && (inst2 & 0x8000) == 0x0000)
1387 {
1388 unsigned int imm = ((bits (insn, 10, 10) << 11)
1389 | (bits (inst2, 12, 14) << 8)
1390 | bits (inst2, 0, 7));
1391
1392 regs[bits (inst2, 8, 11)]
1393 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1394 }
1395
1396 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1397 {
1398 unsigned int imm = ((bits (insn, 10, 10) << 11)
1399 | (bits (inst2, 12, 14) << 8)
1400 | bits (inst2, 0, 7));
1401
1402 regs[bits (inst2, 8, 11)]
1403 = pv_constant (thumb_expand_immediate (imm));
1404 }
1405
1406 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1407 {
1408 unsigned int imm
1409 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1410
1411 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1412 }
1413
1414 else if (insn == 0xea5f /* mov.w Rd,Rm */
1415 && (inst2 & 0xf0f0) == 0)
1416 {
1417 int dst_reg = (inst2 & 0x0f00) >> 8;
1418 int src_reg = inst2 & 0xf;
1419 regs[dst_reg] = regs[src_reg];
1420 }
1421
1422 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1423 {
1424 /* Constant pool loads. */
1425 unsigned int constant;
1426 CORE_ADDR loc;
1427
1428 offset = bits (inst2, 0, 11);
1429 if (insn & 0x0080)
1430 loc = start + 4 + offset;
1431 else
1432 loc = start + 4 - offset;
1433
1434 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1435 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1436 }
1437
1438 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1439 {
1440 /* Constant pool loads. */
1441 unsigned int constant;
1442 CORE_ADDR loc;
1443
1444 offset = bits (inst2, 0, 7) << 2;
1445 if (insn & 0x0080)
1446 loc = start + 4 + offset;
1447 else
1448 loc = start + 4 - offset;
1449
1450 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1451 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1452
1453 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1454 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1455 }
1456 /* Start of ARMv8.1-m PACBTI extension instructions. */
1457 else if (IS_PAC (whole_insn))
1458 {
1459 /* LR and SP are input registers. PAC is in R12. LR is
1460 signed from this point onwards. NOP space. */
1461 ra_signed_state = true;
1462 }
1463 else if (IS_PACBTI (whole_insn))
1464 {
1465 /* LR and SP are input registers. PAC is in R12 and PC is a
1466 valid BTI landing pad. LR is signed from this point onwards.
1467 NOP space. */
1468 ra_signed_state = true;
1469 }
1470 else if (IS_BTI (whole_insn))
1471 {
1472 /* Valid BTI landing pad. NOP space. */
1473 }
1474 else if (IS_PACG (whole_insn))
1475 {
1476 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1477 this point onwards. */
1478 ra_signed_state = true;
1479 }
1480 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1481 {
1482 /* These instructions appear close to the epilogue, when signed
1483 pointers are getting authenticated. */
1484 ra_signed_state = false;
1485 }
1486 /* End of ARMv8.1-m PACBTI extension instructions */
1487 else if (thumb2_instruction_changes_pc (insn, inst2))
1488 {
1489 /* Don't scan past anything that might change control flow. */
1490 break;
1491 }
1492 else
1493 {
1494 /* The optimizer might shove anything into the prologue,
1495 so we just skip what we don't recognize. */
1496 unrecognized_pc = start;
1497 }
1498
1499 /* Make sure we are dealing with a target that supports ARMv8.1-m
1500 PACBTI. */
1501 if (cache != nullptr && tdep->have_pacbti
1502 && ra_signed_state.has_value ())
1503 {
1504 arm_debug_printf ("Found pacbti instruction at %s",
1505 paddress (gdbarch, start));
1506 arm_debug_printf ("RA is %s",
1507 *ra_signed_state ? "signed" : "not signed");
1508 cache->ra_signed_state = ra_signed_state;
1509 }
1510
1511 start += 2;
1512 }
1513 else if (thumb_instruction_changes_pc (insn))
1514 {
1515 /* Don't scan past anything that might change control flow. */
1516 break;
1517 }
1518 else
1519 {
1520 /* The optimizer might shove anything into the prologue,
1521 so we just skip what we don't recognize. */
1522 unrecognized_pc = start;
1523 }
1524
1525 start += 2;
1526 }
1527
1528 arm_debug_printf ("Prologue scan stopped at %s",
1529 paddress (gdbarch, start));
1530
1531 if (unrecognized_pc == 0)
1532 unrecognized_pc = start;
1533
1534 if (cache == NULL)
1535 return unrecognized_pc;
1536
1537 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1538 {
1539 /* Frame pointer is fp. Frame size is constant. */
1540 cache->framereg = ARM_FP_REGNUM;
1541 cache->framesize = -regs[ARM_FP_REGNUM].k;
1542 }
1543 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1544 {
1545 /* Frame pointer is r7. Frame size is constant. */
1546 cache->framereg = THUMB_FP_REGNUM;
1547 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1548 }
1549 else
1550 {
1551 /* Try the stack pointer... this is a bit desperate. */
1552 cache->framereg = ARM_SP_REGNUM;
1553 cache->framesize = -regs[ARM_SP_REGNUM].k;
1554 }
1555
1556 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1557 if (stack.find_reg (gdbarch, i, &offset))
1558 {
1559 cache->saved_regs[i].set_addr (offset);
1560 if (i == ARM_SP_REGNUM)
1561 arm_cache_set_active_sp_value(cache, tdep, offset);
1562 }
1563
1564 return unrecognized_pc;
1565 }
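
/* As an illustration of the analysis above, a typical GCC -mthumb prologue

     push  {r7, lr}      @ 0xb580
     sub   sp, #8        @ 0xb082
     add   r7, sp, #0    @ 0xaf00

   is recorded (when a cache is supplied) as a 16-byte frame with r7 as the
   frame register: r7 and lr are noted as saved at offsets -8 and -4 from the
   entry SP, framereg is THUMB_FP_REGNUM and framesize is 16. This is only a
   sketch of the common case; real prologues may mix in any of the other
   instructions handled above. */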
1566
1567
1568 /* Try to analyze the instructions starting from PC, which load the symbol
1569 __stack_chk_guard. Return the address of __stack_chk_guard loaded by these
1570 instructions, set the destination register number in *DESTREG, and set the
1571 size of the loading instructions in *OFFSET. Return 0 if the instructions
1572 are not recognized. */
1573
1574 static CORE_ADDR
1575 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1576 unsigned int *destreg, int *offset)
1577 {
1578 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1579 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1580 unsigned int low, high, address;
1581
1582 address = 0;
1583 if (is_thumb)
1584 {
1585 unsigned short insn1
1586 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1587
1588 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1589 {
1590 *destreg = bits (insn1, 8, 10);
1591 *offset = 2;
1592 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1593 address = read_memory_unsigned_integer (address, 4,
1594 byte_order_for_code);
1595 }
1596 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1597 {
1598 unsigned short insn2
1599 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1600
1601 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1602
1603 insn1
1604 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1605 insn2
1606 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1607
1608 /* movt Rd, #const */
1609 if ((insn1 & 0xfbc0) == 0xf2c0)
1610 {
1611 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1612 *destreg = bits (insn2, 8, 11);
1613 *offset = 8;
1614 address = (high << 16 | low);
1615 }
1616 }
1617 }
1618 else
1619 {
1620 unsigned int insn
1621 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1622
1623 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1624 {
1625 address = bits (insn, 0, 11) + pc + 8;
1626 address = read_memory_unsigned_integer (address, 4,
1627 byte_order_for_code);
1628
1629 *destreg = bits (insn, 12, 15);
1630 *offset = 4;
1631 }
1632 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1633 {
1634 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1635
1636 insn
1637 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1638
1639 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1640 {
1641 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1642 *destreg = bits (insn, 12, 15);
1643 *offset = 8;
1644 address = (high << 16 | low);
1645 }
1646 }
1647 }
1648
1649 return address;
1650 }
1651
1652 /* Try to skip a sequence of instructions used for the stack protector. If PC
1653 points to the first instruction of this sequence, return the address of the
1654 first instruction after this sequence; otherwise, return the original PC.
1655
1656 On arm, this sequence of instructions is mainly composed of three steps:
1657 Step 1: load symbol __stack_chk_guard,
1658 Step 2: load from address of __stack_chk_guard,
1659 Step 3: store it to somewhere else.
1660
1661 Usually, the instructions in steps 2 and 3 are the same across ARM
1662 architectures. In step 2, it is one instruction, 'ldr Rx, [Rn, #0]', and
1663 in step 3, it is also one instruction, 'str Rx, [r7, #immd]'. However,
1664 the instructions in step 1 vary across ARM architectures. On ARMv7,
1665 they are,
1666
1667 movw Rn, #:lower16:__stack_chk_guard
1668 movt Rn, #:upper16:__stack_chk_guard
1669
1670 On ARMv5t, it is,
1671
1672 ldr Rn, .Label
1673 ....
1674 .Label:
1675 .word __stack_chk_guard
1676
1677 Since ldr/str are very common instructions, we can't use them alone as the
1678 'fingerprint' or 'signature' of a stack protector sequence. Here we use the
1679 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1680 stripped, as the 'fingerprint' of a stack protector code sequence. */
1681
1682 static CORE_ADDR
1683 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1684 {
1685 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1686 unsigned int basereg;
1687 struct bound_minimal_symbol stack_chk_guard;
1688 int offset;
1689 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1690 CORE_ADDR addr;
1691
1692 /* Try to parse the instructions in Step 1. */
1693 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1694 &basereg, &offset);
1695 if (!addr)
1696 return pc;
1697
1698 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1699 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1700 Otherwise, this sequence cannot be for stack protector. */
1701 if (stack_chk_guard.minsym == NULL
1702 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1703 return pc;
1704
1705 if (is_thumb)
1706 {
1707 unsigned int destreg;
1708 unsigned short insn
1709 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1710
1711 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1712 if ((insn & 0xf800) != 0x6800)
1713 return pc;
1714 if (bits (insn, 3, 5) != basereg)
1715 return pc;
1716 destreg = bits (insn, 0, 2);
1717
1718 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1719 byte_order_for_code);
1720 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1721 if ((insn & 0xf800) != 0x6000)
1722 return pc;
1723 if (destreg != bits (insn, 0, 2))
1724 return pc;
1725 }
1726 else
1727 {
1728 unsigned int destreg;
1729 unsigned int insn
1730 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1731
1732 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1733 if ((insn & 0x0e500000) != 0x04100000)
1734 return pc;
1735 if (bits (insn, 16, 19) != basereg)
1736 return pc;
1737 destreg = bits (insn, 12, 15);
1738 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1739 insn = read_code_unsigned_integer (pc + offset + 4,
1740 4, byte_order_for_code);
1741 if ((insn & 0x0e500000) != 0x04000000)
1742 return pc;
1743 if (bits (insn, 12, 15) != destreg)
1744 return pc;
1745 }
1746 /* The total size of the two ldr/str instructions is 4 on Thumb-2, and 8
1747 on ARM. */
1748 if (is_thumb)
1749 return pc + offset + 4;
1750 else
1751 return pc + offset + 8;
1752 }
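
/* For illustration, a typical ARMv7 sequence skipped by the code above looks
   like the following (the register choice is arbitrary):

     movw  r3, #:lower16:__stack_chk_guard   @ Step 1
     movt  r3, #:upper16:__stack_chk_guard
     ldr   r3, [r3]                          @ Step 2
     str   r3, [r7, #4]                      @ Step 3

   If PC points at the movw, the address of the instruction following the str
   is returned; in any other situation the original PC is returned. */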
1753
1754 /* Advance the PC across any function entry prologue instructions to
1755 reach some "real" code.
1756
1757 The APCS (ARM Procedure Call Standard) defines the following
1758 prologue:
1759
1760 mov ip, sp
1761 [stmfd sp!, {a1,a2,a3,a4}]
1762 stmfd sp!, {...,fp,ip,lr,pc}
1763 [stfe f7, [sp, #-12]!]
1764 [stfe f6, [sp, #-12]!]
1765 [stfe f5, [sp, #-12]!]
1766 [stfe f4, [sp, #-12]!]
1767 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1768
1769 static CORE_ADDR
1770 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1771 {
1772 CORE_ADDR func_addr, func_end_addr, limit_pc;
1773
1774 /* See if we can determine the end of the prologue via the symbol table.
1775 If so, then return either PC, or the PC after the prologue, whichever
1776 is greater. */
1777 bool func_addr_found
1778 = find_pc_partial_function (pc, NULL, &func_addr, &func_end_addr);
1779
1780 /* Whether the function is thumb mode or not. */
1781 bool func_is_thumb = false;
1782
1783 if (func_addr_found)
1784 {
1785 CORE_ADDR post_prologue_pc
1786 = skip_prologue_using_sal (gdbarch, func_addr);
1787 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1788
1789 if (post_prologue_pc)
1790 post_prologue_pc
1791 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1792
1793
1794 /* GCC always emits a line note before the prologue and another
1795 one after, even if the two are at the same address or on the
1796 same line. Take advantage of this so that we do not need to
1797 know every instruction that might appear in the prologue. We
1798 will have producer information for most binaries; if it is
1799 missing (e.g. for -gstabs), assume the GNU tools. */
1800 if (post_prologue_pc
1801 && (cust == NULL
1802 || cust->producer () == NULL
1803 || startswith (cust->producer (), "GNU ")
1804 || producer_is_llvm (cust->producer ())))
1805 return post_prologue_pc;
1806
1807 if (post_prologue_pc != 0)
1808 {
1809 CORE_ADDR analyzed_limit;
1810
1811 /* For non-GCC compilers, make sure the entire line is an
1812 acceptable prologue; GDB will round this function's
1813 return value up to the end of the following line so we
1814 can not skip just part of a line (and we do not want to).
1815
1816 RealView does not treat the prologue specially, but does
1817 associate prologue code with the opening brace; so this
1818 lets us skip the first line if we think it is the opening
1819 brace. */
1820 func_is_thumb = arm_pc_is_thumb (gdbarch, func_addr);
1821 if (func_is_thumb)
1822 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1823 post_prologue_pc, NULL);
1824 else
1825 analyzed_limit
1826 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1827 NULL, target_arm_instruction_reader ());
1828
1829 if (analyzed_limit != post_prologue_pc)
1830 return func_addr;
1831
1832 return post_prologue_pc;
1833 }
1834 }
1835
1836 /* Can't determine prologue from the symbol table, need to examine
1837 instructions. */
1838
1839 /* Find an upper limit on the function prologue using the debug
1840 information. If the debug information could not be used to provide
1841 that bound, then use an arbitrary large number as the upper bound. */
1842 /* Like arm_scan_prologue, stop no later than pc + 64. */
1843 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1844 if (limit_pc == 0)
1845 limit_pc = pc + 64; /* Magic. */
1846
1847 /* Set the correct adjustment based on whether the function is thumb mode or
1848 not. We use it to get the address of the last instruction in the
1849 function (as opposed to the first address of the next function). */
1850 CORE_ADDR adjustment = func_is_thumb ? 2 : 4;
1851
1852 limit_pc
1853 = func_end_addr == 0 ? limit_pc : std::min (limit_pc,
1854 func_end_addr - adjustment);
1855
1856 /* Check if this is Thumb code. */
1857 if (arm_pc_is_thumb (gdbarch, pc))
1858 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1859 else
1860 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1861 target_arm_instruction_reader ());
1862 }
1863
1864 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1865 This function decodes a Thumb function prologue to determine:
1866 1) the size of the stack frame
1867 2) which registers are saved on it
1868 3) the offsets of saved regs
1869 4) the offset from the stack pointer to the frame pointer
1870
1871 A typical Thumb function prologue would create this stack frame
1872 (offsets relative to FP)
1873 old SP -> 24 stack parameters
1874 20 LR
1875 16 R7
1876 R7 -> 0 local variables (16 bytes)
1877 SP -> -12 additional stack space (12 bytes)
1878 The frame size would thus be 36 bytes, and the frame offset would be
1879 12 bytes. The frame register is R7.
1880
1881 The comments for thumb_skip_prolog() describe the algorithm we use
1882 to detect the end of the prologue. */
1883
1884 static void
1885 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1886 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1887 {
1888 CORE_ADDR prologue_start;
1889 CORE_ADDR prologue_end;
1890
1891 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1892 &prologue_end))
1893 {
1894 /* See comment in arm_scan_prologue for an explanation of
1895 this heuristic. */
1896 if (prologue_end > prologue_start + 64)
1897 {
1898 prologue_end = prologue_start + 64;
1899 }
1900 }
1901 else
1902 /* We're in the boondocks: we have no idea where the start of the
1903 function is. */
1904 return;
1905
1906 prologue_end = std::min (prologue_end, prev_pc);
1907
1908 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1909 }
1910
1911 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1912 otherwise. */
1913
1914 static int
1915 arm_instruction_restores_sp (unsigned int insn)
1916 {
1917 if (bits (insn, 28, 31) != INST_NV)
1918 {
1919 if ((insn & 0x0df0f000) == 0x0080d000
1920 /* ADD SP (register or immediate). */
1921 || (insn & 0x0df0f000) == 0x0040d000
1922 /* SUB SP (register or immediate). */
1923 || (insn & 0x0ffffff0) == 0x01a0d000
1924 /* MOV SP. */
1925 || (insn & 0x0fff0000) == 0x08bd0000
1926 /* POP (LDMIA). */
1927 || (insn & 0x0fff0000) == 0x049d0000)
1928 /* POP of a single register. */
1929 return 1;
1930 }
1931
1932 return 0;
1933 }
1934
1935 /* Implement immediate value decoding, as described in section A5.2.4
1936 (Modified immediate constants in ARM instructions) of the ARM Architecture
1937 Reference Manual (ARMv7-A and ARMv7-R edition). */
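/* For example, the 12-bit field 0x102 encodes the 8-bit value 0x02
rotated right by 2 (twice the 4-bit rotation field 0x1), so it expands
to 0x80000000. */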
1938
1939 static uint32_t
1940 arm_expand_immediate (uint32_t imm)
1941 {
1942 /* Immediate values are 12 bits long. */
1943 gdb_assert ((imm & 0xfffff000) == 0);
1944
1945 uint32_t unrotated_value = imm & 0xff;
1946 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1947
1948 if (rotate_amount == 0)
1949 return unrotated_value;
1950
1951 return ((unrotated_value >> rotate_amount)
1952 | (unrotated_value << (32 - rotate_amount)));
1953 }
1954
1955 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1956 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1957 fill it in. Return the first address not recognized as a prologue
1958 instruction.
1959
1960 We recognize all the instructions typically found in ARM prologues,
1961 plus harmless instructions which can be skipped (either for analysis
1962 purposes, or a more restrictive set that can be skipped when finding
1963 the end of the prologue). */
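/* For example, a typical APCS frame prologue recognized by this scan is:

mov ip, sp
stmfd sp!, {fp, ip, lr, pc}
sub fp, ip, #4
sub sp, sp, #16

which leaves fp pointing into the block of saved registers and sp
lowered by the size of the locals. */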
1964
1965 static CORE_ADDR
1966 arm_analyze_prologue (struct gdbarch *gdbarch,
1967 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1968 struct arm_prologue_cache *cache,
1969 const arm_instruction_reader &insn_reader)
1970 {
1971 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1972 int regno;
1973 CORE_ADDR offset, current_pc;
1974 pv_t regs[ARM_FPS_REGNUM];
1975 CORE_ADDR unrecognized_pc = 0;
1976 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1977
1978 /* Search the prologue looking for instructions that set up the
1979 frame pointer, adjust the stack pointer, and save registers.
1980
1981 Be careful, however, and if it doesn't look like a prologue,
1982 don't try to scan it. If, for instance, a frameless function
1983 begins with stmfd sp!, then we will tell ourselves there is
1984 a frame, which will confuse stack traceback, as well as "finish"
1985 and other operations that rely on a knowledge of the stack
1986 traceback. */
1987
1988 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1989 regs[regno] = pv_register (regno, 0);
1990 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1991
1992 for (current_pc = prologue_start;
1993 current_pc < prologue_end;
1994 current_pc += 4)
1995 {
1996 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1997
1998 if (insn == 0xe1a0c00d) /* mov ip, sp */
1999 {
2000 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
2001 continue;
2002 }
2003 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
2004 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2005 {
2006 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2007 int rd = bits (insn, 12, 15);
2008 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
2009 continue;
2010 }
2011 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
2012 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2013 {
2014 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2015 int rd = bits (insn, 12, 15);
2016 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
2017 continue;
2018 }
2019 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
2020 [sp, #-4]! */
2021 {
2022 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2023 break;
2024 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2025 stack.store (regs[ARM_SP_REGNUM], 4,
2026 regs[bits (insn, 12, 15)]);
2027 continue;
2028 }
2029 else if ((insn & 0xffff0000) == 0xe92d0000)
2030 /* stmfd sp!, {..., fp, ip, lr, pc}
2031 or
2032 stmfd sp!, {a1, a2, a3, a4} */
2033 {
2034 int mask = insn & 0xffff;
2035
2036 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2037 break;
2038
2039 /* Calculate offsets of saved registers. */
2040 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
2041 if (mask & (1 << regno))
2042 {
2043 regs[ARM_SP_REGNUM]
2044 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2045 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
2046 }
2047 }
2048 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
2049 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
2050 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
2051 {
2052 /* No need to add this to saved_regs -- it's just an arg reg. */
2053 continue;
2054 }
2055 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
2056 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
2057 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
2058 {
2059 /* No need to add this to saved_regs -- it's just an arg reg. */
2060 continue;
2061 }
2062 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
2063 { registers } */
2064 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2065 {
2066 /* No need to add this to saved_regs -- it's just arg regs. */
2067 continue;
2068 }
2069 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
2070 {
2071 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2072 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
2073 }
2074 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
2075 {
2076 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2077 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
2078 }
2079 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
2080 [sp, -#c]! */
2081 && tdep->have_fpa_registers)
2082 {
2083 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2084 break;
2085
2086 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2087 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
2088 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
2089 }
2090 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
2091 [sp!] */
2092 && tdep->have_fpa_registers)
2093 {
2094 int n_saved_fp_regs;
2095 unsigned int fp_start_reg, fp_bound_reg;
2096
2097 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2098 break;
2099
2100 if ((insn & 0x800) == 0x800) /* N0 is set */
2101 {
2102 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2103 n_saved_fp_regs = 3;
2104 else
2105 n_saved_fp_regs = 1;
2106 }
2107 else
2108 {
2109 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2110 n_saved_fp_regs = 2;
2111 else
2112 n_saved_fp_regs = 4;
2113 }
2114
2115 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2116 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2117 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2118 {
2119 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2120 stack.store (regs[ARM_SP_REGNUM], 12,
2121 regs[fp_start_reg]);
2122 }
2123 }
2124 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2125 {
2126 /* Allow some special function calls when skipping the
2127 prologue; GCC generates these before storing arguments to
2128 the stack. */
2129 CORE_ADDR dest = BranchDest (current_pc, insn);
2130
2131 if (skip_prologue_function (gdbarch, dest, 0))
2132 continue;
2133 else
2134 break;
2135 }
2136 else if ((insn & 0xf0000000) != 0xe0000000)
2137 break; /* Condition not true, exit early. */
2138 else if (arm_instruction_changes_pc (insn))
2139 /* Don't scan past anything that might change control flow. */
2140 break;
2141 else if (arm_instruction_restores_sp (insn))
2142 {
2143 /* Don't scan past the epilogue. */
2144 break;
2145 }
2146 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2147 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2148 /* Ignore block loads from the stack, potentially copying
2149 parameters from memory. */
2150 continue;
2151 else if ((insn & 0xfc500000) == 0xe4100000
2152 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2153 /* Similarly ignore single loads from the stack. */
2154 continue;
2155 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2156 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2157 register instead of the stack. */
2158 continue;
2159 else
2160 {
2161 /* The optimizer might shove anything into the prologue. If we
2162 are building up the cache (cache != NULL) from scanning the
2163 prologue, just skip what we don't recognize and scan further to
2164 make the cache as complete as possible. However, if we are
2165 skipping the prologue, we stop immediately on an unrecognized
2166 instruction. */
2167 unrecognized_pc = current_pc;
2168 if (cache != NULL)
2169 continue;
2170 else
2171 break;
2172 }
2173 }
2174
2175 if (unrecognized_pc == 0)
2176 unrecognized_pc = current_pc;
2177
2178 if (cache)
2179 {
2180 int framereg, framesize;
2181
2182 /* The frame size is just the distance from the frame register
2183 to the original stack pointer. */
2184 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2185 {
2186 /* Frame pointer is fp. */
2187 framereg = ARM_FP_REGNUM;
2188 framesize = -regs[ARM_FP_REGNUM].k;
2189 }
2190 else
2191 {
2192 /* Try the stack pointer... this is a bit desperate. */
2193 framereg = ARM_SP_REGNUM;
2194 framesize = -regs[ARM_SP_REGNUM].k;
2195 }
2196
2197 cache->framereg = framereg;
2198 cache->framesize = framesize;
2199
2200 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2201 if (stack.find_reg (gdbarch, regno, &offset))
2202 {
2203 cache->saved_regs[regno].set_addr (offset);
2204 if (regno == ARM_SP_REGNUM)
2205 arm_cache_set_active_sp_value (cache, tdep, offset);
2206 }
2207 }
2208
2209 arm_debug_printf ("Prologue scan stopped at %s",
2210 paddress (gdbarch, unrecognized_pc));
2211
2212 return unrecognized_pc;
2213 }
2214
2215 static void
2216 arm_scan_prologue (frame_info_ptr this_frame,
2217 struct arm_prologue_cache *cache)
2218 {
2219 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2220 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2221 CORE_ADDR prologue_start, prologue_end;
2222 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2223 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2224 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2225
2226 /* Assume there is no frame until proven otherwise. */
2227 cache->framereg = ARM_SP_REGNUM;
2228 cache->framesize = 0;
2229
2230 /* Check for Thumb prologue. */
2231 if (arm_frame_is_thumb (this_frame))
2232 {
2233 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2234 return;
2235 }
2236
2237 /* Find the function prologue. If we can't find the function in
2238 the symbol table, peek in the stack frame to find the PC. */
2239 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2240 &prologue_end))
2241 {
2242 /* One way to find the end of the prologue (which works well
2243 for unoptimized code) is to do the following:
2244
2245 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2246
2247 if (sal.line == 0)
2248 prologue_end = prev_pc;
2249 else if (sal.end < prologue_end)
2250 prologue_end = sal.end;
2251
2252 This mechanism is very accurate so long as the optimizer
2253 doesn't move any instructions from the function body into the
2254 prologue. If this happens, sal.end will be the last
2255 instruction in the first hunk of prologue code just before
2256 the first instruction that the scheduler has moved from
2257 the body to the prologue.
2258
2259 In order to make sure that we scan all of the prologue
2260 instructions, we use a slightly less accurate mechanism which
2261 may scan more than necessary. To help compensate for this
2262 lack of accuracy, the prologue scanning loop below contains
2263 several clauses which will cause the loop to terminate early if
2264 an implausible prologue instruction is encountered.
2265
2266 The expression
2267
2268 prologue_start + 64
2269
2270 is a suitable endpoint since it accounts for the largest
2271 possible prologue plus up to five instructions inserted by
2272 the scheduler. */
2273
2274 if (prologue_end > prologue_start + 64)
2275 {
2276 prologue_end = prologue_start + 64; /* See above. */
2277 }
2278 }
2279 else
2280 {
2281 /* We have no symbol information. Our only option is to assume this
2282 function has a standard stack frame and the normal frame register.
2283 Then, we can find the value of our frame pointer on entrance to
2284 the callee (or at the present moment if this is the innermost frame).
2285 The value stored there should be the address of the stmfd + 8. */
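/* The +8 reflects the usual ARM-state behavior of a stored PC reading
as the address of the current instruction plus 8. */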
2286 CORE_ADDR frame_loc;
2287 ULONGEST return_value;
2288
2289 /* AAPCS does not use a frame register, so we can abort here. */
2290 if (tdep->arm_abi == ARM_ABI_AAPCS)
2291 return;
2292
2293 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2294 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2295 &return_value))
2296 return;
2297 else
2298 {
2299 prologue_start = gdbarch_addr_bits_remove
2300 (gdbarch, return_value) - 8;
2301 prologue_end = prologue_start + 64; /* See above. */
2302 }
2303 }
2304
2305 if (prev_pc < prologue_end)
2306 prologue_end = prev_pc;
2307
2308 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2309 target_arm_instruction_reader ());
2310 }
2311
2312 static struct arm_prologue_cache *
2313 arm_make_prologue_cache (frame_info_ptr this_frame)
2314 {
2315 int reg;
2316 struct arm_prologue_cache *cache;
2317 CORE_ADDR unwound_fp, prev_sp;
2318
2319 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2320 arm_cache_init (cache, this_frame);
2321
2322 arm_scan_prologue (this_frame, cache);
2323
2324 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2325 if (unwound_fp == 0)
2326 return cache;
2327
2328 arm_gdbarch_tdep *tdep
2329 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2330
2331 prev_sp = unwound_fp + cache->framesize;
2332 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2333
2334 /* Calculate actual addresses of saved registers using offsets
2335 determined by arm_scan_prologue. */
2336 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2337 if (cache->saved_regs[reg].is_addr ())
2338 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2339 + prev_sp);
2340
2341 return cache;
2342 }
2343
2344 /* Implementation of the stop_reason hook for arm_prologue frames. */
2345
2346 static enum unwind_stop_reason
2347 arm_prologue_unwind_stop_reason (frame_info_ptr this_frame,
2348 void **this_cache)
2349 {
2350 struct arm_prologue_cache *cache;
2351 CORE_ADDR pc;
2352
2353 if (*this_cache == NULL)
2354 *this_cache = arm_make_prologue_cache (this_frame);
2355 cache = (struct arm_prologue_cache *) *this_cache;
2356
2357 /* This is meant to halt the backtrace at "_start". */
2358 pc = get_frame_pc (this_frame);
2359 gdbarch *arch = get_frame_arch (this_frame);
2360 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
2361 if (pc <= tdep->lowest_pc)
2362 return UNWIND_OUTERMOST;
2363
2364 /* If we've hit a wall, stop. */
2365 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2366 return UNWIND_OUTERMOST;
2367
2368 return UNWIND_NO_REASON;
2369 }
2370
2371 /* Our frame ID for a normal frame is the current function's starting PC
2372 and the caller's SP when we were called. */
2373
2374 static void
2375 arm_prologue_this_id (frame_info_ptr this_frame,
2376 void **this_cache,
2377 struct frame_id *this_id)
2378 {
2379 struct arm_prologue_cache *cache;
2380 struct frame_id id;
2381 CORE_ADDR pc, func;
2382
2383 if (*this_cache == NULL)
2384 *this_cache = arm_make_prologue_cache (this_frame);
2385 cache = (struct arm_prologue_cache *) *this_cache;
2386
2387 arm_gdbarch_tdep *tdep
2388 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2389
2390 /* Use function start address as part of the frame ID. If we cannot
2391 identify the start address (due to missing symbol information),
2392 fall back to just using the current PC. */
2393 pc = get_frame_pc (this_frame);
2394 func = get_frame_func (this_frame);
2395 if (!func)
2396 func = pc;
2397
2398 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2399 *this_id = id;
2400 }
2401
2402 static struct value *
2403 arm_prologue_prev_register (frame_info_ptr this_frame,
2404 void **this_cache,
2405 int prev_regnum)
2406 {
2407 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2408 struct arm_prologue_cache *cache;
2409 CORE_ADDR sp_value;
2410
2411 if (*this_cache == NULL)
2412 *this_cache = arm_make_prologue_cache (this_frame);
2413 cache = (struct arm_prologue_cache *) *this_cache;
2414
2415 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2416
2417 /* If this frame has signed the return address, mark it as so. */
2418 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2419 && *cache->ra_signed_state)
2420 set_frame_previous_pc_masked (this_frame);
2421
2422 /* If we are asked to unwind the PC, then we need to return the LR
2423 instead. The prologue may save PC, but it will point into this
2424 frame's prologue, not the next frame's resume location. Also
2425 strip the saved T bit. A valid LR may have the low bit set, but
2426 a valid PC never does. */
2427 if (prev_regnum == ARM_PC_REGNUM)
2428 {
2429 CORE_ADDR lr;
2430
2431 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2432 return frame_unwind_got_constant (this_frame, prev_regnum,
2433 arm_addr_bits_remove (gdbarch, lr));
2434 }
2435
2436 /* SP is generally not saved to the stack, but this frame is
2437 identified by the next frame's stack pointer at the time of the call.
2438 The value was already reconstructed into PREV_SP. */
2439 if (prev_regnum == ARM_SP_REGNUM)
2440 return frame_unwind_got_constant (this_frame, prev_regnum,
2441 arm_cache_get_prev_sp_value (cache, tdep));
2442
2443 /* The value might be one of the alternative SPs; if so, use the
2444 value already constructed. */
2445 if (arm_is_alternative_sp_register (tdep, prev_regnum))
2446 {
2447 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2448 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2449 }
2450
2451 /* The CPSR may have been changed by the call instruction and by the
2452 called function. The only bit we can reconstruct is the T bit,
2453 by checking the low bit of LR as of the call. This is a reliable
2454 indicator of Thumb-ness except for some ARM v4T pre-interworking
2455 Thumb code, which could get away with a clear low bit as long as
2456 the called function did not use bx. Guess that all other
2457 bits are unchanged; the condition flags are presumably lost,
2458 but the processor status is likely valid. */
2459 if (prev_regnum == ARM_PS_REGNUM)
2460 {
2461 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2462 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2463
2464 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2465 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2466 }
2467
2468 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2469 prev_regnum);
2470 }
2471
2472 static frame_unwind arm_prologue_unwind = {
2473 "arm prologue",
2474 NORMAL_FRAME,
2475 arm_prologue_unwind_stop_reason,
2476 arm_prologue_this_id,
2477 arm_prologue_prev_register,
2478 NULL,
2479 default_frame_sniffer
2480 };
2481
2482 /* Maintain a list of ARM exception table entries per objfile, similar to the
2483 list of mapping symbols. We only cache entries for standard ARM-defined
2484 personality routines; the cache will contain only the frame unwinding
2485 instructions associated with the entry (not the descriptors). */
2486
2487 struct arm_exidx_entry
2488 {
2489 CORE_ADDR addr;
2490 gdb_byte *entry;
2491
2492 bool operator< (const arm_exidx_entry &other) const
2493 {
2494 return addr < other.addr;
2495 }
2496 };
2497
2498 struct arm_exidx_data
2499 {
2500 std::vector<std::vector<arm_exidx_entry>> section_maps;
2501 };
2502
2503 /* Per-BFD key to store exception handling information. */
2504 static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key;
2505
2506 static struct obj_section *
2507 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2508 {
2509 for (obj_section *osect : objfile->sections ())
2510 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2511 {
2512 bfd_vma start, size;
2513 start = bfd_section_vma (osect->the_bfd_section);
2514 size = bfd_section_size (osect->the_bfd_section);
2515
2516 if (start <= vma && vma < start + size)
2517 return osect;
2518 }
2519
2520 return NULL;
2521 }
2522
2523 /* Parse contents of exception table and exception index sections
2524 of OBJFILE, and fill in the exception table entry cache.
2525
2526 For each entry that refers to a standard ARM-defined personality
2527 routine, extract the frame unwinding instructions (from either
2528 the index or the table section). The unwinding instructions
2529 are normalized by:
2530 - extracting them from the rest of the table data
2531 - converting to host endianness
2532 - appending the implicit 0xb0 ("Finish") code
2533
2534 The extracted and normalized instructions are stored for later
2535 retrieval by the arm_find_exidx_entry routine. */
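/* Each .ARM.exidx entry is a pair of 32-bit words: a prel31 offset to
the function it covers, and either the value 1 (EXIDX_CANTUNWIND), an
inline table entry with bit 31 set, or a prel31 offset to an entry in
.ARM.extab. The parsing loop below follows this layout. */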
2536
2537 static void
2538 arm_exidx_new_objfile (struct objfile *objfile)
2539 {
2540 struct arm_exidx_data *data;
2541 asection *exidx, *extab;
2542 bfd_vma exidx_vma = 0, extab_vma = 0;
2543 LONGEST i;
2544
2545 /* If we've already touched this file, do nothing. */
2546 if (arm_exidx_data_key.get (objfile->obfd.get ()) != nullptr)
2547 return;
2548
2549 /* Read contents of exception table and index. */
2550 exidx = bfd_get_section_by_name (objfile->obfd.get (),
2551 ELF_STRING_ARM_unwind);
2552 gdb::byte_vector exidx_data;
2553 if (exidx)
2554 {
2555 exidx_vma = bfd_section_vma (exidx);
2556 exidx_data.resize (bfd_section_size (exidx));
2557
2558 if (!bfd_get_section_contents (objfile->obfd.get (), exidx,
2559 exidx_data.data (), 0,
2560 exidx_data.size ()))
2561 return;
2562 }
2563
2564 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab");
2565 gdb::byte_vector extab_data;
2566 if (extab)
2567 {
2568 extab_vma = bfd_section_vma (extab);
2569 extab_data.resize (bfd_section_size (extab));
2570
2571 if (!bfd_get_section_contents (objfile->obfd.get (), extab,
2572 extab_data.data (), 0,
2573 extab_data.size ()))
2574 return;
2575 }
2576
2577 /* Allocate exception table data structure. */
2578 data = arm_exidx_data_key.emplace (objfile->obfd.get ());
2579 data->section_maps.resize (objfile->obfd->section_count);
2580
2581 /* Fill in exception table. */
2582 for (i = 0; i < exidx_data.size () / 8; i++)
2583 {
2584 struct arm_exidx_entry new_exidx_entry;
2585 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2586 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2587 exidx_data.data () + i * 8 + 4);
2588 bfd_vma addr = 0, word = 0;
2589 int n_bytes = 0, n_words = 0;
2590 struct obj_section *sec;
2591 gdb_byte *entry = NULL;
2592
2593 /* Extract address of start of function. */
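/* The low 31 bits hold a place-relative (prel31) offset; the
XOR/subtract pair below sign-extends it to 32 bits before the address
of the entry itself is added. */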
2594 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2595 idx += exidx_vma + i * 8;
2596
2597 /* Find section containing function and compute section offset. */
2598 sec = arm_obj_section_from_vma (objfile, idx);
2599 if (sec == NULL)
2600 continue;
2601 idx -= bfd_section_vma (sec->the_bfd_section);
2602
2603 /* Determine address of exception table entry. */
2604 if (val == 1)
2605 {
2606 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2607 }
2608 else if ((val & 0xff000000) == 0x80000000)
2609 {
2610 /* Exception table entry embedded in .ARM.exidx
2611 -- must be short form. */
2612 word = val;
2613 n_bytes = 3;
2614 }
2615 else if (!(val & 0x80000000))
2616 {
2617 /* Exception table entry in .ARM.extab. */
2618 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2619 addr += exidx_vma + i * 8 + 4;
2620
2621 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2622 {
2623 word = bfd_h_get_32 (objfile->obfd,
2624 extab_data.data () + addr - extab_vma);
2625 addr += 4;
2626
2627 if ((word & 0xff000000) == 0x80000000)
2628 {
2629 /* Short form. */
2630 n_bytes = 3;
2631 }
2632 else if ((word & 0xff000000) == 0x81000000
2633 || (word & 0xff000000) == 0x82000000)
2634 {
2635 /* Long form. */
2636 n_bytes = 2;
2637 n_words = ((word >> 16) & 0xff);
2638 }
2639 else if (!(word & 0x80000000))
2640 {
2641 bfd_vma pers;
2642 struct obj_section *pers_sec;
2643 int gnu_personality = 0;
2644
2645 /* Custom personality routine. */
2646 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2647 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2648
2649 /* Check whether we've got one of the variants of the
2650 GNU personality routines. */
2651 pers_sec = arm_obj_section_from_vma (objfile, pers);
2652 if (pers_sec)
2653 {
2654 static const char *personality[] =
2655 {
2656 "__gcc_personality_v0",
2657 "__gxx_personality_v0",
2658 "__gcj_personality_v0",
2659 "__gnu_objc_personality_v0",
2660 NULL
2661 };
2662
2663 CORE_ADDR pc = pers + pers_sec->offset ();
2664 int k;
2665
2666 for (k = 0; personality[k]; k++)
2667 if (lookup_minimal_symbol_by_pc_name
2668 (pc, personality[k], objfile))
2669 {
2670 gnu_personality = 1;
2671 break;
2672 }
2673 }
2674
2675 /* If so, the next word contains a word count in the high
2676 byte, followed by the same unwind instructions as the
2677 pre-defined forms. */
2678 if (gnu_personality
2679 && addr + 4 <= extab_vma + extab_data.size ())
2680 {
2681 word = bfd_h_get_32 (objfile->obfd,
2682 (extab_data.data ()
2683 + addr - extab_vma));
2684 addr += 4;
2685 n_bytes = 3;
2686 n_words = ((word >> 24) & 0xff);
2687 }
2688 }
2689 }
2690 }
2691
2692 /* Sanity check address. */
2693 if (n_words)
2694 if (addr < extab_vma
2695 || addr + 4 * n_words > extab_vma + extab_data.size ())
2696 n_words = n_bytes = 0;
2697
2698 /* The unwind instructions reside in WORD (only the N_BYTES least
2699 significant bytes are valid), followed by N_WORDS words in the
2700 extab section starting at ADDR. */
2701 if (n_bytes || n_words)
2702 {
2703 gdb_byte *p = entry
2704 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2705 n_bytes + n_words * 4 + 1);
2706
2707 while (n_bytes--)
2708 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2709
2710 while (n_words--)
2711 {
2712 word = bfd_h_get_32 (objfile->obfd,
2713 extab_data.data () + addr - extab_vma);
2714 addr += 4;
2715
2716 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2717 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2718 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2719 *p++ = (gdb_byte) (word & 0xff);
2720 }
2721
2722 /* Implied "Finish" to terminate the list. */
2723 *p++ = 0xb0;
2724 }
2725
2726 /* Push the entry onto the vector. Entries are guaranteed to
2727 always appear in order of increasing addresses. */
2728 new_exidx_entry.addr = idx;
2729 new_exidx_entry.entry = entry;
2730 data->section_maps[sec->the_bfd_section->index].push_back
2731 (new_exidx_entry);
2732 }
2733 }
2734
2735 /* Search for the exception table entry covering MEMADDR. If one is found,
2736 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2737 set *START to the start of the region covered by this entry. */
2738
2739 static gdb_byte *
2740 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2741 {
2742 struct obj_section *sec;
2743
2744 sec = find_pc_section (memaddr);
2745 if (sec != NULL)
2746 {
2747 struct arm_exidx_data *data;
2748 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2749
2750 data = arm_exidx_data_key.get (sec->objfile->obfd.get ());
2751 if (data != NULL)
2752 {
2753 std::vector<arm_exidx_entry> &map
2754 = data->section_maps[sec->the_bfd_section->index];
2755 if (!map.empty ())
2756 {
2757 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2758
2759 /* std::lower_bound finds the earliest ordered insertion
2760 point. If the following symbol starts at this exact
2761 address, we use that; otherwise, the preceding
2762 exception table entry covers this address. */
2763 if (idx < map.end ())
2764 {
2765 if (idx->addr == map_key.addr)
2766 {
2767 if (start)
2768 *start = idx->addr + sec->addr ();
2769 return idx->entry;
2770 }
2771 }
2772
2773 if (idx > map.begin ())
2774 {
2775 idx = idx - 1;
2776 if (start)
2777 *start = idx->addr + sec->addr ();
2778 return idx->entry;
2779 }
2780 }
2781 }
2782 }
2783
2784 return NULL;
2785 }
2786
2787 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2788 instruction list from the ARM exception table entry ENTRY, allocate and
2789 return a prologue cache structure describing how to unwind this frame.
2790
2791 Return NULL if the unwinding instruction list contains a "spare",
2792 "reserved" or "refuse to unwind" instruction as defined in section
2793 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2794 for the ARM Architecture" document. */
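/* As a rough map of the byte-codes decoded below: 0x00-0x7f adjust vsp
up or down by a small constant, 0x80 xx pops r4-r15 under a mask, 0x9n
sets vsp from register n, 0xa0-0xaf pop r4..r[4+count] (optionally with
LR), 0xb0 is "Finish", 0xb1 xx pops r0-r3 under a mask, 0xb2 adds a
ULEB128-scaled offset to vsp, and the remaining recognized codes pop
VFP or iWMMXt registers; anything else is treated as "spare". */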
2795
2796 static struct arm_prologue_cache *
2797 arm_exidx_fill_cache (frame_info_ptr this_frame, gdb_byte *entry)
2798 {
2799 CORE_ADDR vsp = 0;
2800 int vsp_valid = 0;
2801
2802 struct arm_prologue_cache *cache;
2803 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2804 arm_cache_init (cache, this_frame);
2805
2806 for (;;)
2807 {
2808 gdb_byte insn;
2809
2810 /* Whenever we reload SP, we have to retrieve its actual value in
2811 the current frame. */
2812 if (!vsp_valid)
2813 {
2814 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2815 {
2816 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2817 vsp = get_frame_register_unsigned (this_frame, reg);
2818 }
2819 else
2820 {
2821 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2822 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2823 }
2824
2825 vsp_valid = 1;
2826 }
2827
2828 /* Decode next unwind instruction. */
2829 insn = *entry++;
2830
2831 if ((insn & 0xc0) == 0)
2832 {
2833 int offset = insn & 0x3f;
2834 vsp += (offset << 2) + 4;
2835 }
2836 else if ((insn & 0xc0) == 0x40)
2837 {
2838 int offset = insn & 0x3f;
2839 vsp -= (offset << 2) + 4;
2840 }
2841 else if ((insn & 0xf0) == 0x80)
2842 {
2843 int mask = ((insn & 0xf) << 8) | *entry++;
2844 int i;
2845
2846 /* The special case of an all-zero mask identifies
2847 "Refuse to unwind". We return NULL to fall back
2848 to the prologue analyzer. */
2849 if (mask == 0)
2850 return NULL;
2851
2852 /* Pop registers r4..r15 under mask. */
2853 for (i = 0; i < 12; i++)
2854 if (mask & (1 << i))
2855 {
2856 cache->saved_regs[4 + i].set_addr (vsp);
2857 vsp += 4;
2858 }
2859
2860 /* Special-case popping SP -- we need to reload vsp. */
2861 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2862 vsp_valid = 0;
2863 }
2864 else if ((insn & 0xf0) == 0x90)
2865 {
2866 int reg = insn & 0xf;
2867
2868 /* Reserved cases. */
2869 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2870 return NULL;
2871
2872 /* Set SP from another register and mark VSP for reload. */
2873 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2874 vsp_valid = 0;
2875 }
2876 else if ((insn & 0xf0) == 0xa0)
2877 {
2878 int count = insn & 0x7;
2879 int pop_lr = (insn & 0x8) != 0;
2880 int i;
2881
2882 /* Pop r4..r[4+count]. */
2883 for (i = 0; i <= count; i++)
2884 {
2885 cache->saved_regs[4 + i].set_addr (vsp);
2886 vsp += 4;
2887 }
2888
2889 /* If indicated by flag, pop LR as well. */
2890 if (pop_lr)
2891 {
2892 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2893 vsp += 4;
2894 }
2895 }
2896 else if (insn == 0xb0)
2897 {
2898 /* We could only have updated PC by popping into it; if so, it
2899 will show up as an address. Otherwise, copy LR into PC. */
2900 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2901 cache->saved_regs[ARM_PC_REGNUM]
2902 = cache->saved_regs[ARM_LR_REGNUM];
2903
2904 /* We're done. */
2905 break;
2906 }
2907 else if (insn == 0xb1)
2908 {
2909 int mask = *entry++;
2910 int i;
2911
2912 /* An all-zero mask or a mask >= 16 is "spare". */
2913 if (mask == 0 || mask >= 16)
2914 return NULL;
2915
2916 /* Pop r0..r3 under mask. */
2917 for (i = 0; i < 4; i++)
2918 if (mask & (1 << i))
2919 {
2920 cache->saved_regs[i].set_addr (vsp);
2921 vsp += 4;
2922 }
2923 }
2924 else if (insn == 0xb2)
2925 {
2926 ULONGEST offset = 0;
2927 unsigned shift = 0;
2928
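/* The operand is a ULEB128-encoded value; this byte-code means
vsp = vsp + 0x204 + (uleb128 << 2). */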
2929 do
2930 {
2931 offset |= (*entry & 0x7f) << shift;
2932 shift += 7;
2933 }
2934 while (*entry++ & 0x80);
2935
2936 vsp += 0x204 + (offset << 2);
2937 }
2938 else if (insn == 0xb3)
2939 {
2940 int start = *entry >> 4;
2941 int count = (*entry++) & 0xf;
2942 int i;
2943
2944 /* Only registers D0..D15 are valid here. */
2945 if (start + count >= 16)
2946 return NULL;
2947
2948 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2949 for (i = 0; i <= count; i++)
2950 {
2951 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2952 vsp += 8;
2953 }
2954
2955 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2956 vsp += 4;
2957 }
2958 else if ((insn & 0xf8) == 0xb8)
2959 {
2960 int count = insn & 0x7;
2961 int i;
2962
2963 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2964 for (i = 0; i <= count; i++)
2965 {
2966 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2967 vsp += 8;
2968 }
2969
2970 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2971 vsp += 4;
2972 }
2973 else if (insn == 0xc6)
2974 {
2975 int start = *entry >> 4;
2976 int count = (*entry++) & 0xf;
2977 int i;
2978
2979 /* Only registers WR0..WR15 are valid. */
2980 if (start + count >= 16)
2981 return NULL;
2982
2983 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2984 for (i = 0; i <= count; i++)
2985 {
2986 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2987 vsp += 8;
2988 }
2989 }
2990 else if (insn == 0xc7)
2991 {
2992 int mask = *entry++;
2993 int i;
2994
2995 /* An all-zero mask or a mask >= 16 is "spare". */
2996 if (mask == 0 || mask >= 16)
2997 return NULL;
2998
2999 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
3000 for (i = 0; i < 4; i++)
3001 if (mask & (1 << i))
3002 {
3003 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
3004 vsp += 4;
3005 }
3006 }
3007 else if ((insn & 0xf8) == 0xc0)
3008 {
3009 int count = insn & 0x7;
3010 int i;
3011
3012 /* Pop iwmmx registers WR[10]..WR[10+count]. */
3013 for (i = 0; i <= count; i++)
3014 {
3015 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
3016 vsp += 8;
3017 }
3018 }
3019 else if (insn == 0xc8)
3020 {
3021 int start = *entry >> 4;
3022 int count = (*entry++) & 0xf;
3023 int i;
3024
3025 /* Only registers D16..D31 are valid here. */
3026 if (start + count >= 16)
3027 return NULL;
3028
3029 /* Pop VFP double-precision registers
3030 D[16+start]..D[16+start+count]. */
3031 for (i = 0; i <= count; i++)
3032 {
3033 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
3034 vsp += 8;
3035 }
3036 }
3037 else if (insn == 0xc9)
3038 {
3039 int start = *entry >> 4;
3040 int count = (*entry++) & 0xf;
3041 int i;
3042
3043 /* Pop VFP double-precision registers D[start]..D[start+count]. */
3044 for (i = 0; i <= count; i++)
3045 {
3046 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
3047 vsp += 8;
3048 }
3049 }
3050 else if ((insn & 0xf8) == 0xd0)
3051 {
3052 int count = insn & 0x7;
3053 int i;
3054
3055 /* Pop VFP double-precision registers D[8]..D[8+count]. */
3056 for (i = 0; i <= count; i++)
3057 {
3058 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
3059 vsp += 8;
3060 }
3061 }
3062 else
3063 {
3064 /* Everything else is "spare". */
3065 return NULL;
3066 }
3067 }
3068
3069 /* If we restore SP from a register, assume this was the frame register.
3070 Otherwise just fall back to SP as frame register. */
3071 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
3072 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
3073 else
3074 cache->framereg = ARM_SP_REGNUM;
3075
3076 /* Determine offset to previous frame. */
3077 cache->framesize
3078 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
3079
3080 /* We already got the previous SP. */
3081 arm_gdbarch_tdep *tdep
3082 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3083 arm_cache_set_active_sp_value (cache, tdep, vsp);
3084
3085 return cache;
3086 }
3087
3088 /* Unwinding via ARM exception table entries. Note that the sniffer
3089 already computes a filled-in prologue cache, which is then used
3090 with the same arm_prologue_this_id and arm_prologue_prev_register
3091 routines also used for prologue-parsing based unwinding. */
3092
3093 static int
3094 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3095 frame_info_ptr this_frame,
3096 void **this_prologue_cache)
3097 {
3098 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3099 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3100 CORE_ADDR addr_in_block, exidx_region, func_start;
3101 struct arm_prologue_cache *cache;
3102 gdb_byte *entry;
3103
3104 /* See if we have an ARM exception table entry covering this address. */
3105 addr_in_block = get_frame_address_in_block (this_frame);
3106 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3107 if (!entry)
3108 return 0;
3109
3110 /* The ARM exception table does not describe unwind information
3111 for arbitrary PC values, but is guaranteed to be correct only
3112 at call sites. We have to decide here whether we want to use
3113 ARM exception table information for this frame, or fall back
3114 to using prologue parsing. (Note that if we have DWARF CFI,
3115 this sniffer isn't even called -- CFI is always preferred.)
3116
3117 Before we make this decision, however, we check whether we
3118 actually have *symbol* information for the current frame.
3119 If not, prologue parsing would not work anyway, so we might
3120 as well use the exception table and hope for the best. */
3121 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3122 {
3123 int exc_valid = 0;
3124
3125 /* If the next frame is "normal", we are at a call site in this
3126 frame, so exception information is guaranteed to be valid. */
3127 if (get_next_frame (this_frame)
3128 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3129 exc_valid = 1;
3130
3131 /* Some syscalls keep PC pointing to the SVC instruction itself. */
3132 for (int shift = 0; shift <= 1 && !exc_valid; ++shift)
3133 {
3134 /* We also assume exception information is valid if we're currently
3135 blocked in a system call. The system library is supposed to
3136 ensure this, so that e.g. pthread cancellation works. */
3137 if (arm_frame_is_thumb (this_frame))
3138 {
3139 ULONGEST insn;
3140
3141 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3142 - (shift ? 2 : 0)),
3143 2, byte_order_for_code,
3144 &insn)
3145 && (insn & 0xff00) == 0xdf00 /* svc */)
3146 exc_valid = 1;
3147 }
3148 else
3149 {
3150 ULONGEST insn;
3151
3152 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3153 - (shift ? 4 : 0)),
3154 4, byte_order_for_code,
3155 &insn)
3156 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3157 exc_valid = 1;
3158 }
3159 }
3160
3161 /* Bail out if we don't know that exception information is valid. */
3162 if (!exc_valid)
3163 return 0;
3164
3165 /* The ARM exception index does not mark the *end* of the region
3166 covered by the entry, and some functions will not have any entry.
3167 To correctly recognize the end of the covered region, the linker
3168 should have inserted dummy records with a CANTUNWIND marker.
3169
3170 Unfortunately, current versions of GNU ld do not reliably do
3171 this, and thus we may have found an incorrect entry above.
3172 As a (temporary) sanity check, we only use the entry if it
3173 lies *within* the bounds of the function. Note that this check
3174 might reject perfectly valid entries that just happen to cover
3175 multiple functions; therefore this check ought to be removed
3176 once the linker is fixed. */
3177 if (func_start > exidx_region)
3178 return 0;
3179 }
3180
3181 /* Decode the list of unwinding instructions into a prologue cache.
3182 Note that this may fail due to e.g. a "refuse to unwind" code. */
3183 cache = arm_exidx_fill_cache (this_frame, entry);
3184 if (!cache)
3185 return 0;
3186
3187 *this_prologue_cache = cache;
3188 return 1;
3189 }
3190
3191 struct frame_unwind arm_exidx_unwind = {
3192 "arm exidx",
3193 NORMAL_FRAME,
3194 default_frame_unwind_stop_reason,
3195 arm_prologue_this_id,
3196 arm_prologue_prev_register,
3197 NULL,
3198 arm_exidx_unwind_sniffer
3199 };
3200
3201 static struct arm_prologue_cache *
3202 arm_make_epilogue_frame_cache (frame_info_ptr this_frame)
3203 {
3204 struct arm_prologue_cache *cache;
3205 int reg;
3206
3207 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3208 arm_cache_init (cache, this_frame);
3209
3210 /* Still rely on the offset calculated from the prologue. */
3211 arm_scan_prologue (this_frame, cache);
3212
3213 /* Since we are in the epilogue, the SP has been restored. */
3214 arm_gdbarch_tdep *tdep
3215 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3216 arm_cache_set_active_sp_value (cache, tdep,
3217 get_frame_register_unsigned (this_frame,
3218 ARM_SP_REGNUM));
3219
3220 /* Calculate actual addresses of saved registers using offsets
3221 determined by arm_scan_prologue. */
3222 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3223 if (cache->saved_regs[reg].is_addr ())
3224 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3225 + arm_cache_get_prev_sp_value (cache, tdep));
3226
3227 return cache;
3228 }
3229
3230 /* Implementation of function hook 'this_id' in
3231 'struct frame_unwind' for epilogue unwinder. */
3232
3233 static void
3234 arm_epilogue_frame_this_id (frame_info_ptr this_frame,
3235 void **this_cache,
3236 struct frame_id *this_id)
3237 {
3238 struct arm_prologue_cache *cache;
3239 CORE_ADDR pc, func;
3240
3241 if (*this_cache == NULL)
3242 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3243 cache = (struct arm_prologue_cache *) *this_cache;
3244
3245 /* Use function start address as part of the frame ID. If we cannot
3246 identify the start address (due to missing symbol information),
3247 fall back to just using the current PC. */
3248 pc = get_frame_pc (this_frame);
3249 func = get_frame_func (this_frame);
3250 if (func == 0)
3251 func = pc;
3252
3253 arm_gdbarch_tdep *tdep
3254 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3255 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3256 }
3257
3258 /* Implementation of function hook 'prev_register' in
3259 'struct frame_unwind' for epilogue unwinder. */
3260
3261 static struct value *
3262 arm_epilogue_frame_prev_register (frame_info_ptr this_frame,
3263 void **this_cache, int regnum)
3264 {
3265 if (*this_cache == NULL)
3266 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3267
3268 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3269 }
3270
3271 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3272 CORE_ADDR pc);
3273 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3274 CORE_ADDR pc);
3275
3276 /* Implementation of function hook 'sniffer' in
3277 'struct frame_unwind' for epilogue unwinder. */
3278
3279 static int
3280 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3281 frame_info_ptr this_frame,
3282 void **this_prologue_cache)
3283 {
3284 if (frame_relative_level (this_frame) == 0)
3285 {
3286 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3287 CORE_ADDR pc = get_frame_pc (this_frame);
3288
3289 if (arm_frame_is_thumb (this_frame))
3290 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3291 else
3292 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3293 }
3294 else
3295 return 0;
3296 }
3297
3298 /* Frame unwinder from epilogue. */
3299
3300 static const struct frame_unwind arm_epilogue_frame_unwind =
3301 {
3302 "arm epilogue",
3303 NORMAL_FRAME,
3304 default_frame_unwind_stop_reason,
3305 arm_epilogue_frame_this_id,
3306 arm_epilogue_frame_prev_register,
3307 NULL,
3308 arm_epilogue_frame_sniffer,
3309 };
3310
3311 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3312 trampoline, return the target PC. Otherwise return 0.
3313
3314 void call0a (char c, short s, int i, long l) {}
3315
3316 int main (void)
3317 {
3318 (*pointer_to_call0a) (c, s, i, l);
3319 }
3320
3321 Instead of calling a stub library function _call_via_xx (xx is
3322 the register name), GCC may inline the trampoline in the object
3323 file as below (register r2 has the address of call0a).
3324
3325 .global main
3326 .type main, %function
3327 ...
3328 bl .L1
3329 ...
3330 .size main, .-main
3331
3332 .L1:
3333 bx r2
3334
3335 The trampoline 'bx r2' doesn't belong to main. */
3336
3337 static CORE_ADDR
3338 arm_skip_bx_reg (frame_info_ptr frame, CORE_ADDR pc)
3339 {
3340 /* The heuristic for recognizing such a trampoline is that FRAME is
3341 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3342 if (arm_frame_is_thumb (frame))
3343 {
3344 gdb_byte buf[2];
3345
3346 if (target_read_memory (pc, buf, 2) == 0)
3347 {
3348 struct gdbarch *gdbarch = get_frame_arch (frame);
3349 enum bfd_endian byte_order_for_code
3350 = gdbarch_byte_order_for_code (gdbarch);
3351 uint16_t insn
3352 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3353
3354 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3355 {
3356 CORE_ADDR dest
3357 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3358
3359 /* Clear the LSB so that gdb core sets step-resume
3360 breakpoint at the right address. */
3361 return UNMAKE_THUMB_ADDR (dest);
3362 }
3363 }
3364 }
3365
3366 return 0;
3367 }
3368
3369 static struct arm_prologue_cache *
3370 arm_make_stub_cache (frame_info_ptr this_frame)
3371 {
3372 struct arm_prologue_cache *cache;
3373
3374 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3375 arm_cache_init (cache, this_frame);
3376
3377 arm_gdbarch_tdep *tdep
3378 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3379 arm_cache_set_active_sp_value (cache, tdep,
3380 get_frame_register_unsigned (this_frame,
3381 ARM_SP_REGNUM));
3382
3383 return cache;
3384 }
3385
3386 /* Our frame ID for a stub frame is the current SP and LR. */
3387
3388 static void
3389 arm_stub_this_id (frame_info_ptr this_frame,
3390 void **this_cache,
3391 struct frame_id *this_id)
3392 {
3393 struct arm_prologue_cache *cache;
3394
3395 if (*this_cache == NULL)
3396 *this_cache = arm_make_stub_cache (this_frame);
3397 cache = (struct arm_prologue_cache *) *this_cache;
3398
3399 arm_gdbarch_tdep *tdep
3400 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3401 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3402 get_frame_pc (this_frame));
3403 }
3404
3405 static int
3406 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3407 frame_info_ptr this_frame,
3408 void **this_prologue_cache)
3409 {
3410 CORE_ADDR addr_in_block;
3411 gdb_byte dummy[4];
3412 CORE_ADDR pc, start_addr;
3413 const char *name;
3414
3415 addr_in_block = get_frame_address_in_block (this_frame);
3416 pc = get_frame_pc (this_frame);
3417 if (in_plt_section (addr_in_block)
3418 /* We also use the stub unwinder if the target memory is unreadable,
3419 to avoid having the prologue unwinder try to read it. */
3420 || target_read_memory (pc, dummy, 4) != 0)
3421 return 1;
3422
3423 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3424 && arm_skip_bx_reg (this_frame, pc) != 0)
3425 return 1;
3426
3427 return 0;
3428 }
3429
3430 struct frame_unwind arm_stub_unwind = {
3431 "arm stub",
3432 NORMAL_FRAME,
3433 default_frame_unwind_stop_reason,
3434 arm_stub_this_id,
3435 arm_prologue_prev_register,
3436 NULL,
3437 arm_stub_unwind_sniffer
3438 };
3439
3440 /* Store, into CACHE->saved_regs, the addresses of the saved
3441 registers of the frame described by THIS_FRAME. CACHE is
3442 returned. */
3443
3444 static struct arm_prologue_cache *
3445 arm_m_exception_cache (frame_info_ptr this_frame)
3446 {
3447 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3448 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3449 struct arm_prologue_cache *cache;
3450
3451 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3452 arm_cache_init (cache, this_frame);
3453
3454 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3455 describes which bits in LR define which stack was used prior to the
3456 exception and whether the FPU is used (causing an extended stack frame). */
3457
3458 /* In the lockup state PC contains a lockup magic value.
3459 The PC of the next outer frame is irreversibly lost. The other
3460 registers are intact, so LR likely contains the PC of some frame
3461 near the outer one, but we cannot analyze the next outer frame
3462 without knowing its PC, and therefore we do not know the SP fixup
3463 for this frame.
3464 Some heuristics to resynchronize SP might be possible.
3465 For simplicity, just terminate the unwinding to prevent it going
3466 astray and attempting to read data/addresses it shouldn't,
3467 which may cause further issues due to side-effects. */
3468 CORE_ADDR pc = get_frame_pc (this_frame);
3469 if (arm_m_addr_is_lockup (pc))
3470 {
3471 /* The lockup can only be real in the innermost frame, as the CPU
3472 is stopped and cannot create more frames.
3473 If we hit the lockup magic PC in any other frame, it is just a
3474 sentinel at the top of the stack: do not warn then.
3475 if (frame_relative_level (this_frame) == 0)
3476 warning (_("ARM M in lockup state, stack unwinding terminated."));
3477
3478 /* Terminate any further stack unwinding. */
3479 arm_cache_set_active_sp_value (cache, tdep, 0);
3480 return cache;
3481 }
3482
3483 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3484
3485 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3486 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3487 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3488 reset if Main Extension is implemented, otherwise the value is unknown. */
3489 if (lr == 0xffffffff)
3490 {
3491 /* Terminate any further stack unwinding. */
3492 arm_cache_set_active_sp_value (cache, tdep, 0);
3493 return cache;
3494 }
3495
3496 /* Check FNC_RETURN indicator bits (24-31). */
3497 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3498 if (fnc_return)
3499 {
3500 /* FNC_RETURN is only valid for targets with Security Extension. */
3501 if (!tdep->have_sec_ext)
3502 {
3503 error (_("While unwinding an exception frame, found unexpected Link "
3504 "Register value %s that requires the security extension, "
3505 "but the extension was not found or is disabled. This "
3506 "should not happen and may be caused by corrupt data or a "
3507 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE));
3508 }
3509
3510 if (!arm_unwind_secure_frames)
3511 {
3512 warning (_("Non-secure to secure stack unwinding disabled."));
3513
3514 /* Terminate any further stack unwinding. */
3515 arm_cache_set_active_sp_value (cache, tdep, 0);
3516 return cache;
3517 }
3518
3519 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
3520 if ((xpsr & 0x1ff) != 0)
3521 /* Handler mode: This is the mode that exceptions are handled in. */
3522 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3523 else
3524 /* Thread mode: This is the normal mode that programs run in. */
3525 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3526
3527 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3528
3529 /* Stack layout for a function call from Secure to Non-Secure state
3530 (ARMv8-M section B3.16):
3531
3532 SP Offset
3533
3534 +-------------------+
3535 0x08 | |
3536 +-------------------+ <-- Original SP
3537 0x04 | Partial xPSR |
3538 +-------------------+
3539 0x00 | Return Address |
3540 +===================+ <-- New SP */
3541
3542 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3543 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3544 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3545
3546 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3547
3548 return cache;
3549 }
3550
3551 /* Check EXC_RETURN indicator bits (24-31). */
3552 bool exc_return = (((lr >> 24) & 0xff) == 0xff);
3553 if (exc_return)
3554 {
3555 int sp_regnum;
3556 bool secure_stack_used = false;
3557 bool default_callee_register_stacking = false;
3558 bool exception_domain_is_secure = false;
3559 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3560
3561 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3562 bool process_stack_used = (bit (lr, 2) != 0);
3563
3564 if (tdep->have_sec_ext)
3565 {
3566 secure_stack_used = (bit (lr, 6) != 0);
3567 default_callee_register_stacking = (bit (lr, 5) != 0);
3568 exception_domain_is_secure = (bit (lr, 0) != 0);
3569
3570 /* Unwinding from non-secure to secure can trip security
3571 measures. In order to avoid the debugger being
3572 intrusive, rely on the user to configure the requested
3573 mode. */
3574 if (secure_stack_used && !exception_domain_is_secure
3575 && !arm_unwind_secure_frames)
3576 {
3577 warning (_("Non-secure to secure stack unwinding disabled."));
3578
3579 /* Terminate any further stack unwinding. */
3580 arm_cache_set_active_sp_value (cache, tdep, 0);
3581 return cache;
3582 }
3583
3584 if (process_stack_used)
3585 {
3586 if (secure_stack_used)
3587 /* Secure thread (process) stack used, use PSP_S as SP. */
3588 sp_regnum = tdep->m_profile_psp_s_regnum;
3589 else
3590 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3591 sp_regnum = tdep->m_profile_psp_ns_regnum;
3592 }
3593 else
3594 {
3595 if (secure_stack_used)
3596 /* Secure main stack used, use MSP_S as SP. */
3597 sp_regnum = tdep->m_profile_msp_s_regnum;
3598 else
3599 /* Non-secure main stack used, use MSP_NS as SP. */
3600 sp_regnum = tdep->m_profile_msp_ns_regnum;
3601 }
3602 }
3603 else
3604 {
3605 if (process_stack_used)
3606 /* Thread (process) stack used, use PSP as SP. */
3607 sp_regnum = tdep->m_profile_psp_regnum;
3608 else
3609 /* Main stack used, use MSP as SP. */
3610 sp_regnum = tdep->m_profile_msp_regnum;
3611 }
3612
3613 /* Set the active SP regnum. */
3614 arm_cache_switch_prev_sp (cache, tdep, sp_regnum);
3615
3616 /* Fetch the SP to use for this frame. */
3617 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3618
3619 /* Exception entry context stacking is described in ARMv8-M (section
3620 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference
3621 Manuals.
3622
3623 The following figure shows the structure of the stack frame when
3624 Security and Floating-point extensions are present.
3625
3626 SP Offsets
3627 Without With
3628 Callee Regs Callee Regs
3629 (Secure -> Non-Secure)
3630 +-------------------+
3631 0xA8 | | 0xD0
3632 +===================+ --+ <-- Original SP
3633 0xA4 | S31 | 0xCC |
3634 +-------------------+ |
3635 ... | Additional FP context
3636 +-------------------+ |
3637 0x68 | S16 | 0x90 |
3638 +===================+ --+
3639 0x64 | Reserved | 0x8C |
3640 +-------------------+ |
3641 0x60 | FPSCR | 0x88 |
3642 +-------------------+ |
3643 0x5C | S15 | 0x84 | FP context
3644 +-------------------+ |
3645 ... |
3646 +-------------------+ |
3647 0x20 | S0 | 0x48 |
3648 +===================+ --+
3649 0x1C | xPSR | 0x44 |
3650 +-------------------+ |
3651 0x18 | Return address | 0x40 |
3652 +-------------------+ |
3653 0x14 | LR(R14) | 0x3C |
3654 +-------------------+ |
3655 0x10 | R12 | 0x38 | State context
3656 +-------------------+ |
3657 0x0C | R3 | 0x34 |
3658 +-------------------+ |
3659 ... |
3660 +-------------------+ |
3661 0x00 | R0 | 0x28 |
3662 +===================+ --+
3663 | R11 | 0x24 |
3664 +-------------------+ |
3665 ... |
3666 +-------------------+ | Additional state
3667 | R4 | 0x08 | context when
3668 +-------------------+ | transitioning from
3669 | Reserved | 0x04 | Secure to Non-Secure
3670 +-------------------+ |
3671 | Magic signature | 0x00 |
3672 +===================+ --+ <-- New SP */
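/* As a concrete example of the layout above: with no callee registers
   stacked (sp_r0_offset == 0) and unwound_sp == 0x2000ffa0 (an arbitrary
   example value), R0 is read from 0x2000ffa0, the return address from
   0x2000ffb8 and xPSR from 0x2000ffbc, matching the set_addr calls
   below. */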
3673
3674 uint32_t sp_r0_offset = 0;
3675
3676 /* With the Security extension, the hardware saves R4..R11 too. */
3677 if (tdep->have_sec_ext && secure_stack_used
3678 && (!default_callee_register_stacking || !exception_domain_is_secure))
3679 {
3680 /* Read R4..R11 from the integer callee registers. */
3681 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3682 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3683 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3684 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3685 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3686 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3687 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3688 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3689 sp_r0_offset = 0x28;
3690 }
3691
3692 /* The hardware saves eight 32-bit words, comprising xPSR,
3693 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3694 "B1.5.6 Exception entry behavior" in
3695 "ARMv7-M Architecture Reference Manual". */
3696 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3697 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3698 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3699 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3700 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset
3701 + 0x10);
3702 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset
3703 + 0x14);
3704 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset
3705 + 0x18);
3706 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset
3707 + 0x1C);
3708
3709 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3710 type used. */
3711 bool extended_frame_used = (bit (lr, 4) == 0);
3712 if (extended_frame_used)
3713 {
3714 ULONGEST fpccr;
3715 ULONGEST fpcar;
3716
3717 /* Read FPCCR register. */
3718 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE,
3719 byte_order, &fpccr))
3720 {
3721 warning (_("Could not fetch required FPCCR content. Further "
3722 "unwinding is impossible."));
3723 arm_cache_set_active_sp_value (cache, tdep, 0);
3724 return cache;
3725 }
3726
3727 /* Read FPCAR register. */
3728 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE,
3729 byte_order, &fpcar))
3730 {
3731 warning (_("Could not fetch FPCAR content. Further unwinding of "
3732 "FP register values will be unreliable."));
3733 fpcar = 0;
3734 }
3735
3736 bool fpccr_aspen = bit (fpccr, 31);
3737 bool fpccr_lspen = bit (fpccr, 30);
3738 bool fpccr_ts = bit (fpccr, 26);
3739 bool fpccr_lspact = bit (fpccr, 0);
3740
3741 /* The LSPEN and ASPEN bits indicate whether the lazy state preservation
3742 for FP registers is enabled or disabled.  The LSPACT bit indicates,
3743 together with FPCAR, whether the lazy state preservation feature is
3744 active for the current frame or for another frame.
3745 See "Lazy context save of FP state" in B1.5.7, and ARM AN298
3746 (as implemented by the Cortex-M4F) for details. */
3747 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20)
3748 == (fpcar & ~0x7));
3749 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen
3750 && fpccr_lspact
3751 && fpcar_points_to_this_frame));
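/* In other words: if lazy preservation is enabled (ASPEN and LSPEN),
   still pending (LSPACT) and FPCAR points at this frame's FP save area,
   the FP state was never actually written to the stack, so the live FP
   registers remain the best source and the stacked area is skipped. */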
3752
3753 /* Extended stack frame type used. */
3754 if (read_fp_regs_from_stack)
3755 {
3756 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20;
3757 for (int i = 0; i < 8; i++)
3758 {
3759 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3760 addr += 8;
3761 }
3762 }
3763 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp
3764 + sp_r0_offset + 0x60);
3765
3766 if (tdep->have_sec_ext && !default_callee_register_stacking
3767 && fpccr_ts)
3768 {
3769 /* Handle floating-point callee saved registers. */
3770 if (read_fp_regs_from_stack)
3771 {
3772 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68;
3773 for (int i = 8; i < 16; i++)
3774 {
3775 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3776 addr += 8;
3777 }
3778 }
3779
3780 arm_cache_set_active_sp_value (cache, tdep,
3781 unwound_sp + sp_r0_offset + 0xA8);
3782 }
3783 else
3784 {
3785 /* Offset 0x64 is reserved. */
3786 arm_cache_set_active_sp_value (cache, tdep,
3787 unwound_sp + sp_r0_offset + 0x68);
3788 }
3789 }
3790 else
3791 {
3792 /* Standard stack frame type used. */
3793 arm_cache_set_active_sp_value (cache, tdep,
3794 unwound_sp + sp_r0_offset + 0x20);
3795 }
3796
3797 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3798 aligner between the top of the 32-byte stack frame and the
3799 previous context's stack pointer. */
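/* For example, if the pre-exception SP was 0x2000ffe4 (an 8-byte
   misaligned example value), the hardware pushed the frame at 0x2000ffc0
   and set xPSR bit 9, so the extra 4 bytes are added back here when
   restoring the caller's SP. */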
3800 ULONGEST xpsr;
3801 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM]
3802 .addr (), ARM_INT_REGISTER_SIZE,
3803 byte_order, &xpsr))
3804 {
3805 warning (_("Could not fetch required XPSR content. Further "
3806 "unwinding is impossible."));
3807 arm_cache_set_active_sp_value (cache, tdep, 0);
3808 return cache;
3809 }
3810
3811 if (bit (xpsr, 9) != 0)
3812 {
3813 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4;
3814 arm_cache_set_active_sp_value (cache, tdep, new_sp);
3815 }
3816
3817 return cache;
3818 }
3819
3820 internal_error (_("While unwinding an exception frame, "
3821 "found unexpected Link Register value "
3822 "%s. This should not happen and may "
3823 "be caused by corrupt data or a bug in"
3824 " GDB."),
3825 phex (lr, ARM_INT_REGISTER_SIZE));
3826 }
3827
3828 /* Implementation of the stop_reason hook for arm_m_exception frames. */
3829
3830 static enum unwind_stop_reason
3831 arm_m_exception_frame_unwind_stop_reason (frame_info_ptr this_frame,
3832 void **this_cache)
3833 {
3834 struct arm_prologue_cache *cache;
3835 arm_gdbarch_tdep *tdep
3836 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3837
3838 if (*this_cache == NULL)
3839 *this_cache = arm_m_exception_cache (this_frame);
3840 cache = (struct arm_prologue_cache *) *this_cache;
3841
3842 /* If we've hit a wall, stop. */
3843 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
3844 return UNWIND_OUTERMOST;
3845
3846 return UNWIND_NO_REASON;
3847 }
3848
3849 /* Implementation of function hook 'this_id' in
3850 'struct frame_unwind'. */
3851
3852 static void
3853 arm_m_exception_this_id (frame_info_ptr this_frame,
3854 void **this_cache,
3855 struct frame_id *this_id)
3856 {
3857 struct arm_prologue_cache *cache;
3858
3859 if (*this_cache == NULL)
3860 *this_cache = arm_m_exception_cache (this_frame);
3861 cache = (struct arm_prologue_cache *) *this_cache;
3862
3863 /* Our frame ID for a stub frame is the current SP and LR. */
3864 arm_gdbarch_tdep *tdep
3865 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3866 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3867 get_frame_pc (this_frame));
3868 }
3869
3870 /* Implementation of function hook 'prev_register' in
3871 'struct frame_unwind'. */
3872
3873 static struct value *
3874 arm_m_exception_prev_register (frame_info_ptr this_frame,
3875 void **this_cache,
3876 int prev_regnum)
3877 {
3878 struct arm_prologue_cache *cache;
3879 CORE_ADDR sp_value;
3880
3881 if (*this_cache == NULL)
3882 *this_cache = arm_m_exception_cache (this_frame);
3883 cache = (struct arm_prologue_cache *) *this_cache;
3884
3885 /* The value was already reconstructed into PREV_SP. */
3886 arm_gdbarch_tdep *tdep
3887 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3888 if (prev_regnum == ARM_SP_REGNUM)
3889 return frame_unwind_got_constant (this_frame, prev_regnum,
3890 arm_cache_get_prev_sp_value (cache, tdep));
3891
3892 /* If we are asked to unwind the PC, strip the saved T bit. */
3893 if (prev_regnum == ARM_PC_REGNUM)
3894 {
3895 struct value *value = trad_frame_get_prev_register (this_frame,
3896 cache->saved_regs,
3897 prev_regnum);
3898 CORE_ADDR pc = value_as_address (value);
3899 return frame_unwind_got_constant (this_frame, prev_regnum,
3900 UNMAKE_THUMB_ADDR (pc));
3901 }
3902
3903 /* The value might be one of the alternative SP, if so, use the
3904 value already constructed. */
3905 if (arm_is_alternative_sp_register (tdep, prev_regnum))
3906 {
3907 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3908 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3909 }
3910
3911 /* If we are asked to unwind the xPSR, set the T bit if the PC is in
3912 Thumb mode.  The LR register is unreliable here, as it contains the
3913 FNC_RETURN or EXC_RETURN pattern. */
3914 if (prev_regnum == ARM_PS_REGNUM)
3915 {
3916 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3917 struct value *value = trad_frame_get_prev_register (this_frame,
3918 cache->saved_regs,
3919 ARM_PC_REGNUM);
3920 CORE_ADDR pc = value_as_address (value);
3921 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3922 ARM_PS_REGNUM);
3923 ULONGEST xpsr = value_as_long (value);
3924
3925 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3926 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3927 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3928 }
3929
3930 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3931 prev_regnum);
3932 }
3933
3934 /* Implementation of function hook 'sniffer' in
3935 'struct frame_unwind'. */
3936
3937 static int
3938 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3939 frame_info_ptr this_frame,
3940 void **this_prologue_cache)
3941 {
3942 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3943 CORE_ADDR this_pc = get_frame_pc (this_frame);
3944
3945 /* No need to check is_m; this sniffer is only registered for
3946 M-profile architectures. */
3947
3948 /* Check if exception frame returns to a magic PC value. */
3949 return arm_m_addr_is_magic (gdbarch, this_pc);
3950 }
3951
3952 /* Frame unwinder for M-profile exceptions (EXC_RETURN on stack),
3953 lockup and secure/nonsecure interstate function calls (FNC_RETURN). */
3954
3955 struct frame_unwind arm_m_exception_unwind =
3956 {
3957 "arm m exception lockup sec_fnc",
3958 SIGTRAMP_FRAME,
3959 arm_m_exception_frame_unwind_stop_reason,
3960 arm_m_exception_this_id,
3961 arm_m_exception_prev_register,
3962 NULL,
3963 arm_m_exception_unwind_sniffer
3964 };
3965
3966 static CORE_ADDR
3967 arm_normal_frame_base (frame_info_ptr this_frame, void **this_cache)
3968 {
3969 struct arm_prologue_cache *cache;
3970
3971 if (*this_cache == NULL)
3972 *this_cache = arm_make_prologue_cache (this_frame);
3973 cache = (struct arm_prologue_cache *) *this_cache;
3974
3975 arm_gdbarch_tdep *tdep
3976 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3977 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3978 }
3979
3980 struct frame_base arm_normal_base = {
3981 &arm_prologue_unwind,
3982 arm_normal_frame_base,
3983 arm_normal_frame_base,
3984 arm_normal_frame_base
3985 };
3986
3987 struct arm_dwarf2_prev_register_cache
3988 {
3989 /* Cached value of the corresponding stack pointer for the inner frame. */
3990 CORE_ADDR sp;
3991 CORE_ADDR msp;
3992 CORE_ADDR msp_s;
3993 CORE_ADDR msp_ns;
3994 CORE_ADDR psp;
3995 CORE_ADDR psp_s;
3996 CORE_ADDR psp_ns;
3997 };
3998
3999 static struct value *
4000 arm_dwarf2_prev_register (frame_info_ptr this_frame, void **this_cache,
4001 int regnum)
4002 {
4003 struct gdbarch * gdbarch = get_frame_arch (this_frame);
4004 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4005 CORE_ADDR lr;
4006 ULONGEST cpsr;
4007 arm_dwarf2_prev_register_cache *cache
4008 = ((arm_dwarf2_prev_register_cache *)
4009 dwarf2_frame_get_fn_data (this_frame, this_cache,
4010 arm_dwarf2_prev_register));
4011
4012 if (!cache)
4013 {
4014 const unsigned int size = sizeof (struct arm_dwarf2_prev_register_cache);
4015 cache = ((arm_dwarf2_prev_register_cache *)
4016 dwarf2_frame_allocate_fn_data (this_frame, this_cache,
4017 arm_dwarf2_prev_register, size));
4018
4019 if (tdep->have_sec_ext)
4020 {
4021 cache->sp
4022 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4023
4024 cache->msp_s
4025 = get_frame_register_unsigned (this_frame,
4026 tdep->m_profile_msp_s_regnum);
4027 cache->msp_ns
4028 = get_frame_register_unsigned (this_frame,
4029 tdep->m_profile_msp_ns_regnum);
4030 cache->psp_s
4031 = get_frame_register_unsigned (this_frame,
4032 tdep->m_profile_psp_s_regnum);
4033 cache->psp_ns
4034 = get_frame_register_unsigned (this_frame,
4035 tdep->m_profile_psp_ns_regnum);
4036 }
4037 else if (tdep->is_m)
4038 {
4039 cache->sp
4040 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4041
4042 cache->msp
4043 = get_frame_register_unsigned (this_frame,
4044 tdep->m_profile_msp_regnum);
4045 cache->psp
4046 = get_frame_register_unsigned (this_frame,
4047 tdep->m_profile_psp_regnum);
4048 }
4049 }
4050
4051 if (regnum == ARM_PC_REGNUM)
4052 {
4053 /* The PC is normally copied from the return column, which
4054 describes saves of LR. However, that version may have an
4055 extra bit set to indicate Thumb state. The bit is not
4056 part of the PC. */
4057
4058 /* Record in the frame whether the return address was signed. */
4059 if (tdep->have_pacbti)
4060 {
4061 CORE_ADDR ra_auth_code
4062 = frame_unwind_register_unsigned (this_frame,
4063 tdep->pacbti_pseudo_base);
4064
4065 if (ra_auth_code != 0)
4066 set_frame_previous_pc_masked (this_frame);
4067 }
4068
4069 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4070 return frame_unwind_got_constant (this_frame, regnum,
4071 arm_addr_bits_remove (gdbarch, lr));
4072 }
4073 else if (regnum == ARM_PS_REGNUM)
4074 {
4075 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
4076 cpsr = get_frame_register_unsigned (this_frame, regnum);
4077 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4078 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
4079 return frame_unwind_got_constant (this_frame, regnum, cpsr);
4080 }
4081 else if (arm_is_alternative_sp_register (tdep, regnum))
4082 {
4083 /* Handle the alternative SP registers on Cortex-M. */
4084 bool override_with_sp_value = false;
4085 CORE_ADDR val;
4086
4087 if (tdep->have_sec_ext)
4088 {
4089 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4090 && (cache->msp_s == cache->sp || cache->msp_ns == cache->sp);
4091 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum)
4092 && (cache->msp_s == cache->sp);
4093 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum)
4094 && (cache->msp_ns == cache->sp);
4095 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4096 && (cache->psp_s == cache->sp || cache->psp_ns == cache->sp);
4097 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum)
4098 && (cache->psp_s == cache->sp);
4099 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum)
4100 && (cache->psp_ns == cache->sp);
4101
4102 override_with_sp_value = is_msp || is_msp_s || is_msp_ns
4103 || is_psp || is_psp_s || is_psp_ns;
4104
4105 }
4106 else if (tdep->is_m)
4107 {
4108 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4109 && (cache->sp == cache->msp);
4110 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4111 && (cache->sp == cache->psp);
4112
4113 override_with_sp_value = is_msp || is_psp;
4114 }
4115
4116 if (override_with_sp_value)
4117 {
4118 /* Use value of SP from previous frame. */
4119 frame_info_ptr prev_frame = get_prev_frame (this_frame);
4120 if (prev_frame)
4121 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM);
4122 else
4123 val = get_frame_base (this_frame);
4124 }
4125 else
4126 /* Use value for the register from previous frame. */
4127 val = get_frame_register_unsigned (this_frame, regnum);
4128
4129 return frame_unwind_got_constant (this_frame, regnum, val);
4130 }
4131
4132 internal_error (_("Unexpected register %d"), regnum);
4133 }
4134
4135 /* Implement the stack_frame_destroyed_p gdbarch method. */
4136
4137 static int
4138 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4139 {
4140 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4141 unsigned int insn, insn2;
4142 int found_return = 0, found_stack_adjust = 0;
4143 CORE_ADDR func_start, func_end;
4144 CORE_ADDR scan_pc;
4145 gdb_byte buf[4];
4146
4147 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4148 return 0;
4149
4150 /* The epilogue is a sequence of instructions along the following lines:
4151
4152 - add stack frame size to SP or FP
4153 - [if frame pointer used] restore SP from FP
4154 - restore registers from SP [may include PC]
4155 - a return-type instruction [if PC wasn't already restored]
4156
4157 In a first pass, we scan forward from the current PC and verify the
4158 instructions we find as compatible with this sequence, ending in a
4159 return instruction.
4160
4161 However, this is not sufficient to distinguish indirect function calls
4162 within a function from indirect tail calls in the epilogue in some cases.
4163 Therefore, if we didn't already find any SP-changing instruction during
4164 forward scan, we add a backward scanning heuristic to ensure we actually
4165 are in the epilogue. */
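/* For example, with PC sitting on the final POP of an epilogue such as

	add	sp, #16		; release the local frame
	pop	{r4, r5, pc}	; restore registers and return

   the forward scan finds the POP writing the PC (found_return) and the
   backward scan finds the preceding SP-adjusting ADD, so the function
   reports that the frame is being destroyed. */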
4166
4167 scan_pc = pc;
4168 while (scan_pc < func_end && !found_return)
4169 {
4170 if (target_read_memory (scan_pc, buf, 2))
4171 break;
4172
4173 scan_pc += 2;
4174 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4175
4176 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
4177 found_return = 1;
4178 else if (insn == 0x46f7) /* mov pc, lr */
4179 found_return = 1;
4180 else if (thumb_instruction_restores_sp (insn))
4181 {
4182 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4183 found_return = 1;
4184 }
4185 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4186 {
4187 if (target_read_memory (scan_pc, buf, 2))
4188 break;
4189
4190 scan_pc += 2;
4191 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
4192
4193 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4194 {
4195 if (insn2 & 0x8000) /* <registers> include PC. */
4196 found_return = 1;
4197 }
4198 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4199 && (insn2 & 0x0fff) == 0x0b04)
4200 {
4201 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
4202 found_return = 1;
4203 }
4204 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4205 && (insn2 & 0x0e00) == 0x0a00)
4206 ;
4207 else
4208 break;
4209 }
4210 else
4211 break;
4212 }
4213
4214 if (!found_return)
4215 return 0;
4216
4217 /* Since any instruction in the epilogue sequence, with the possible
4218 exception of return itself, updates the stack pointer, we need to
4219 scan backwards for at most one instruction. Try either a 16-bit or
4220 a 32-bit instruction. This is just a heuristic, so we do not worry
4221 too much about false positives. */
4222
4223 if (pc - 4 < func_start)
4224 return 0;
4225 if (target_read_memory (pc - 4, buf, 4))
4226 return 0;
4227
4228 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4229 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
4230
4231 if (thumb_instruction_restores_sp (insn2))
4232 found_stack_adjust = 1;
4233 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4234 found_stack_adjust = 1;
4235 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4236 && (insn2 & 0x0fff) == 0x0b04)
4237 found_stack_adjust = 1;
4238 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4239 && (insn2 & 0x0e00) == 0x0a00)
4240 found_stack_adjust = 1;
4241
4242 return found_stack_adjust;
4243 }
4244
4245 static int
4246 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4247 {
4248 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4249 unsigned int insn;
4250 int found_return;
4251 CORE_ADDR func_start, func_end;
4252
4253 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4254 return 0;
4255
4256 /* We are in the epilogue if the previous instruction was a stack
4257 adjustment and the next instruction is a possible return (bx, mov
4258 pc, or pop). We could have to scan backwards to find the stack
4259 adjustment, or forwards to find the return, but this is a decent
4260 approximation. First scan forwards. */
4261
4262 found_return = 0;
4263 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4264 if (bits (insn, 28, 31) != INST_NV)
4265 {
4266 if ((insn & 0x0ffffff0) == 0x012fff10)
4267 /* BX. */
4268 found_return = 1;
4269 else if ((insn & 0x0ffffff0) == 0x01a0f000)
4270 /* MOV PC. */
4271 found_return = 1;
4272 else if ((insn & 0x0fff0000) == 0x08bd0000
4273 && (insn & 0x0000c000) != 0)
4274 /* POP (LDMIA), including PC or LR. */
4275 found_return = 1;
4276 }
4277
4278 if (!found_return)
4279 return 0;
4280
4281 /* Scan backwards. This is just a heuristic, so do not worry about
4282 false positives from mode changes. */
4283
4284 if (pc < func_start + 4)
4285 return 0;
4286
4287 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
4288 if (arm_instruction_restores_sp (insn))
4289 return 1;
4290
4291 return 0;
4292 }
4293
4294 /* Implement the stack_frame_destroyed_p gdbarch method. */
4295
4296 static int
4297 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4298 {
4299 if (arm_pc_is_thumb (gdbarch, pc))
4300 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4301 else
4302 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
4303 }
4304
4305 /* When arguments must be pushed onto the stack, they go on in reverse
4306 order. The code below implements a FILO (stack) to do this. */
4307
4308 struct arm_stack_item
4309 {
4310 int len;
4311 struct arm_stack_item *prev;
4312 gdb_byte *data;
4313 };
4314
4315 static struct arm_stack_item *
4316 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4317 int len)
4318 {
4319 struct arm_stack_item *si;
4320 si = XNEW (struct arm_stack_item);
4321 si->data = (gdb_byte *) xmalloc (len);
4322 si->len = len;
4323 si->prev = prev;
4324 memcpy (si->data, contents, len);
4325 return si;
4326 }
4327
4328 static struct arm_stack_item *
4329 pop_stack_item (struct arm_stack_item *si)
4330 {
4331 struct arm_stack_item *dead = si;
4332 si = si->prev;
4333 xfree (dead->data);
4334 xfree (dead);
4335 return si;
4336 }
4337
4338 /* Implement the gdbarch type alignment method.  This overrides the
4339 generic alignment algorithm for anything that is ARM specific. */
4340
4341 static ULONGEST
4342 arm_type_align (gdbarch *gdbarch, struct type *t)
4343 {
4344 t = check_typedef (t);
4345 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4346 {
4347 /* Use the natural alignment for vector types (the same as for the
4348 scalar element type), but cap the maximum alignment at 64 bits. */
4349 if (t->length () > 8)
4350 return 8;
4351 else
4352 return t->length ();
4353 }
4354
4355 /* Allow the common code to calculate the alignment. */
4356 return 0;
4357 }
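/* For example, a 16-byte NEON vector is capped at 8-byte alignment by the
   code above, while non-vector types return 0 and keep whatever alignment
   the generic code computes for them. */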
4358
4359 /* Possible base types for a candidate for passing and returning in
4360 VFP registers. */
4361
4362 enum arm_vfp_cprc_base_type
4363 {
4364 VFP_CPRC_UNKNOWN,
4365 VFP_CPRC_SINGLE,
4366 VFP_CPRC_DOUBLE,
4367 VFP_CPRC_VEC64,
4368 VFP_CPRC_VEC128
4369 };
4370
4371 /* The length of one element of base type B. */
4372
4373 static unsigned
4374 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4375 {
4376 switch (b)
4377 {
4378 case VFP_CPRC_SINGLE:
4379 return 4;
4380 case VFP_CPRC_DOUBLE:
4381 return 8;
4382 case VFP_CPRC_VEC64:
4383 return 8;
4384 case VFP_CPRC_VEC128:
4385 return 16;
4386 default:
4387 internal_error (_("Invalid VFP CPRC type: %d."),
4388 (int) b);
4389 }
4390 }
4391
4392 /* The character ('s', 'd' or 'q') for the type of VFP register used
4393 for passing base type B. */
4394
4395 static int
4396 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4397 {
4398 switch (b)
4399 {
4400 case VFP_CPRC_SINGLE:
4401 return 's';
4402 case VFP_CPRC_DOUBLE:
4403 return 'd';
4404 case VFP_CPRC_VEC64:
4405 return 'd';
4406 case VFP_CPRC_VEC128:
4407 return 'q';
4408 default:
4409 internal_error (_("Invalid VFP CPRC type: %d."),
4410 (int) b);
4411 }
4412 }
4413
4414 /* Determine whether T may be part of a candidate for passing and
4415 returning in VFP registers, ignoring the limit on the total number
4416 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4417 classification of the first valid component found; if it is not
4418 VFP_CPRC_UNKNOWN, all components must have the same classification
4419 as *BASE_TYPE. If it is found that T contains a type not permitted
4420 for passing and returning in VFP registers, a type differently
4421 classified from *BASE_TYPE, or two types differently classified
4422 from each other, return -1, otherwise return the total number of
4423 base-type elements found (possibly 0 in an empty structure or
4424 array). Vector types are not currently supported, matching the
4425 generic AAPCS support. */
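/* For example, "struct { float x, y; }" classifies as two VFP_CPRC_SINGLE
   elements, "double _Complex" as two VFP_CPRC_DOUBLE elements, and
   "struct { float f; double d; }" returns -1 because its members classify
   differently. */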
4426
4427 static int
4428 arm_vfp_cprc_sub_candidate (struct type *t,
4429 enum arm_vfp_cprc_base_type *base_type)
4430 {
4431 t = check_typedef (t);
4432 switch (t->code ())
4433 {
4434 case TYPE_CODE_FLT:
4435 switch (t->length ())
4436 {
4437 case 4:
4438 if (*base_type == VFP_CPRC_UNKNOWN)
4439 *base_type = VFP_CPRC_SINGLE;
4440 else if (*base_type != VFP_CPRC_SINGLE)
4441 return -1;
4442 return 1;
4443
4444 case 8:
4445 if (*base_type == VFP_CPRC_UNKNOWN)
4446 *base_type = VFP_CPRC_DOUBLE;
4447 else if (*base_type != VFP_CPRC_DOUBLE)
4448 return -1;
4449 return 1;
4450
4451 default:
4452 return -1;
4453 }
4454 break;
4455
4456 case TYPE_CODE_COMPLEX:
4457 /* Arguments of complex T where T is one of the types float or
4458 double get treated as if they are implemented as:
4459
4460 struct complexT
4461 {
4462 T real;
4463 T imag;
4464 };
4465
4466 */
4467 switch (t->length ())
4468 {
4469 case 8:
4470 if (*base_type == VFP_CPRC_UNKNOWN)
4471 *base_type = VFP_CPRC_SINGLE;
4472 else if (*base_type != VFP_CPRC_SINGLE)
4473 return -1;
4474 return 2;
4475
4476 case 16:
4477 if (*base_type == VFP_CPRC_UNKNOWN)
4478 *base_type = VFP_CPRC_DOUBLE;
4479 else if (*base_type != VFP_CPRC_DOUBLE)
4480 return -1;
4481 return 2;
4482
4483 default:
4484 return -1;
4485 }
4486 break;
4487
4488 case TYPE_CODE_ARRAY:
4489 {
4490 if (t->is_vector ())
4491 {
4492 /* 64-bit and 128-bit containerized vector types are VFP
4493 CPRCs. */
4494 switch (t->length ())
4495 {
4496 case 8:
4497 if (*base_type == VFP_CPRC_UNKNOWN)
4498 *base_type = VFP_CPRC_VEC64;
4499 return 1;
4500 case 16:
4501 if (*base_type == VFP_CPRC_UNKNOWN)
4502 *base_type = VFP_CPRC_VEC128;
4503 return 1;
4504 default:
4505 return -1;
4506 }
4507 }
4508 else
4509 {
4510 int count;
4511 unsigned unitlen;
4512
4513 count = arm_vfp_cprc_sub_candidate (t->target_type (),
4514 base_type);
4515 if (count == -1)
4516 return -1;
4517 if (t->length () == 0)
4518 {
4519 gdb_assert (count == 0);
4520 return 0;
4521 }
4522 else if (count == 0)
4523 return -1;
4524 unitlen = arm_vfp_cprc_unit_length (*base_type);
4525 gdb_assert ((t->length () % unitlen) == 0);
4526 return t->length () / unitlen;
4527 }
4528 }
4529 break;
4530
4531 case TYPE_CODE_STRUCT:
4532 {
4533 int count = 0;
4534 unsigned unitlen;
4535 int i;
4536 for (i = 0; i < t->num_fields (); i++)
4537 {
4538 int sub_count = 0;
4539
4540 if (!t->field (i).is_static ())
4541 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4542 base_type);
4543 if (sub_count == -1)
4544 return -1;
4545 count += sub_count;
4546 }
4547 if (t->length () == 0)
4548 {
4549 gdb_assert (count == 0);
4550 return 0;
4551 }
4552 else if (count == 0)
4553 return -1;
4554 unitlen = arm_vfp_cprc_unit_length (*base_type);
4555 if (t->length () != unitlen * count)
4556 return -1;
4557 return count;
4558 }
4559
4560 case TYPE_CODE_UNION:
4561 {
4562 int count = 0;
4563 unsigned unitlen;
4564 int i;
4565 for (i = 0; i < t->num_fields (); i++)
4566 {
4567 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4568 base_type);
4569 if (sub_count == -1)
4570 return -1;
4571 count = (count > sub_count ? count : sub_count);
4572 }
4573 if (t->length () == 0)
4574 {
4575 gdb_assert (count == 0);
4576 return 0;
4577 }
4578 else if (count == 0)
4579 return -1;
4580 unitlen = arm_vfp_cprc_unit_length (*base_type);
4581 if (t->length () != unitlen * count)
4582 return -1;
4583 return count;
4584 }
4585
4586 default:
4587 break;
4588 }
4589
4590 return -1;
4591 }
4592
4593 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4594 if passed to or returned from a non-variadic function with the VFP
4595 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4596 *BASE_TYPE to the base type for T and *COUNT to the number of
4597 elements of that base type before returning. */
4598
4599 static int
4600 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4601 int *count)
4602 {
4603 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4604 int c = arm_vfp_cprc_sub_candidate (t, &b);
4605 if (c <= 0 || c > 4)
4606 return 0;
4607 *base_type = b;
4608 *count = c;
4609 return 1;
4610 }
4611
4612 /* Return 1 if the VFP ABI should be used for passing arguments to and
4613 returning values from a function of type FUNC_TYPE, 0
4614 otherwise. */
4615
4616 static int
4617 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4618 {
4619 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4620
4621 /* Variadic functions always use the base ABI. Assume that functions
4622 without debug info are not variadic. */
4623 if (func_type && check_typedef (func_type)->has_varargs ())
4624 return 0;
4625
4626 /* The VFP ABI is only supported as a variant of AAPCS. */
4627 if (tdep->arm_abi != ARM_ABI_AAPCS)
4628 return 0;
4629
4630 return tdep->fp_model == ARM_FLOAT_VFP;
4631 }
4632
4633 /* We currently only support passing parameters in integer registers, which
4634 conforms with GCC's default model, and VFP argument passing following
4635 the VFP variant of AAPCS. Several other variants exist and
4636 we should probably support some of them based on the selected ABI. */
4637
4638 static CORE_ADDR
4639 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4640 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4641 struct value **args, CORE_ADDR sp,
4642 function_call_return_method return_method,
4643 CORE_ADDR struct_addr)
4644 {
4645 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4646 int argnum;
4647 int argreg;
4648 int nstack;
4649 struct arm_stack_item *si = NULL;
4650 int use_vfp_abi;
4651 struct type *ftype;
4652 unsigned vfp_regs_free = (1 << 16) - 1;
4653 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4654
4655 /* Determine the type of this function and whether the VFP ABI
4656 applies. */
4657 ftype = check_typedef (function->type ());
4658 if (ftype->code () == TYPE_CODE_PTR)
4659 ftype = check_typedef (ftype->target_type ());
4660 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4661
4662 /* Set the return address. For the ARM, the return breakpoint is
4663 always at BP_ADDR. */
4664 if (arm_pc_is_thumb (gdbarch, bp_addr))
4665 bp_addr |= 1;
4666 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4667
4668 /* Walk through the list of args and determine how large a temporary
4669 stack is required. Need to take care here as structs may be
4670 passed on the stack, and we have to push them. */
4671 nstack = 0;
4672
4673 argreg = ARM_A1_REGNUM;
4674 nstack = 0;
4675
4676 /* The struct_return pointer occupies the first parameter
4677 passing register. */
4678 if (return_method == return_method_struct)
4679 {
4680 arm_debug_printf ("struct return in %s = %s",
4681 gdbarch_register_name (gdbarch, argreg),
4682 paddress (gdbarch, struct_addr));
4683
4684 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4685 argreg++;
4686 }
4687
4688 for (argnum = 0; argnum < nargs; argnum++)
4689 {
4690 int len;
4691 struct type *arg_type;
4692 struct type *target_type;
4693 enum type_code typecode;
4694 const bfd_byte *val;
4695 int align;
4696 enum arm_vfp_cprc_base_type vfp_base_type;
4697 int vfp_base_count;
4698 int may_use_core_reg = 1;
4699
4700 arg_type = check_typedef (args[argnum]->type ());
4701 len = arg_type->length ();
4702 target_type = arg_type->target_type ();
4703 typecode = arg_type->code ();
4704 val = args[argnum]->contents ().data ();
4705
4706 align = type_align (arg_type);
4707 /* Round alignment up to a whole number of words. */
4708 align = (align + ARM_INT_REGISTER_SIZE - 1)
4709 & ~(ARM_INT_REGISTER_SIZE - 1);
4710 /* Different ABIs have different maximum alignments. */
4711 if (tdep->arm_abi == ARM_ABI_APCS)
4712 {
4713 /* The APCS ABI only requires word alignment. */
4714 align = ARM_INT_REGISTER_SIZE;
4715 }
4716 else
4717 {
4718 /* The AAPCS requires at most doubleword alignment. */
4719 if (align > ARM_INT_REGISTER_SIZE * 2)
4720 align = ARM_INT_REGISTER_SIZE * 2;
4721 }
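/* For instance, a char argument is rounded up to 4-byte (word) alignment
   here, and under AAPCS a 16-byte aligned vector is clamped to 8 bytes,
   which is what the even-register-pair check below relies on. */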
4722
4723 if (use_vfp_abi
4724 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4725 &vfp_base_count))
4726 {
4727 int regno;
4728 int unit_length;
4729 int shift;
4730 unsigned mask;
4731
4732 /* Because this is a CPRC it cannot go in a core register or
4733 cause a core register to be skipped for alignment.
4734 Either it goes in VFP registers and the rest of this loop
4735 iteration is skipped for this argument, or it goes on the
4736 stack (and the stack alignment code is correct for this
4737 case). */
4738 may_use_core_reg = 0;
4739
4740 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4741 shift = unit_length / 4;
4742 mask = (1 << (shift * vfp_base_count)) - 1;
4743 for (regno = 0; regno < 16; regno += shift)
4744 if (((vfp_regs_free >> regno) & mask) == mask)
4745 break;
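/* For example, a CPRC made of two doubles has unit_length 8, so
   shift == 2 and mask == 0xf: the loop looks for four consecutive free
   single-precision slots starting at an even slot number, i.e. an
   aligned D-register pair such as d0/d1 or d2/d3. */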
4746
4747 if (regno < 16)
4748 {
4749 int reg_char;
4750 int reg_scaled;
4751 int i;
4752
4753 vfp_regs_free &= ~(mask << regno);
4754 reg_scaled = regno / shift;
4755 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4756 for (i = 0; i < vfp_base_count; i++)
4757 {
4758 char name_buf[4];
4759 int regnum;
4760 if (reg_char == 'q')
4761 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4762 val + i * unit_length);
4763 else
4764 {
4765 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4766 reg_char, reg_scaled + i);
4767 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4768 strlen (name_buf));
4769 regcache->cooked_write (regnum, val + i * unit_length);
4770 }
4771 }
4772 continue;
4773 }
4774 else
4775 {
4776 /* This CPRC could not go in VFP registers, so all VFP
4777 registers are now marked as used. */
4778 vfp_regs_free = 0;
4779 }
4780 }
4781
4782 /* Push stack padding for doubleword alignment. */
4783 if (nstack & (align - 1))
4784 {
4785 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4786 nstack += ARM_INT_REGISTER_SIZE;
4787 }
4788
4789 /* Doubleword aligned quantities must go in even register pairs. */
4790 if (may_use_core_reg
4791 && argreg <= ARM_LAST_ARG_REGNUM
4792 && align > ARM_INT_REGISTER_SIZE
4793 && argreg & 1)
4794 argreg++;
4795
4796 /* If the argument is a pointer to a function, and it is a
4797 Thumb function, create a LOCAL copy of the value and set
4798 the THUMB bit in it. */
4799 if (TYPE_CODE_PTR == typecode
4800 && target_type != NULL
4801 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4802 {
4803 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4804 if (arm_pc_is_thumb (gdbarch, regval))
4805 {
4806 bfd_byte *copy = (bfd_byte *) alloca (len);
4807 store_unsigned_integer (copy, len, byte_order,
4808 MAKE_THUMB_ADDR (regval));
4809 val = copy;
4810 }
4811 }
4812
4813 /* Copy the argument to general registers or the stack in
4814 register-sized pieces. Large arguments are split between
4815 registers and stack. */
4816 while (len > 0)
4817 {
4818 int partial_len = len < ARM_INT_REGISTER_SIZE
4819 ? len : ARM_INT_REGISTER_SIZE;
4820 CORE_ADDR regval
4821 = extract_unsigned_integer (val, partial_len, byte_order);
4822
4823 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4824 {
4825 /* The argument is being passed in a general purpose
4826 register. */
4827 if (byte_order == BFD_ENDIAN_BIG)
4828 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4829
4830 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4831 gdbarch_register_name (gdbarch, argreg),
4832 phex (regval, ARM_INT_REGISTER_SIZE));
4833
4834 regcache_cooked_write_unsigned (regcache, argreg, regval);
4835 argreg++;
4836 }
4837 else
4838 {
4839 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4840
4841 memset (buf, 0, sizeof (buf));
4842 store_unsigned_integer (buf, partial_len, byte_order, regval);
4843
4844 /* Push the arguments onto the stack. */
4845 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4846 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4847 nstack += ARM_INT_REGISTER_SIZE;
4848 }
4849
4850 len -= partial_len;
4851 val += partial_len;
4852 }
4853 }
4854 /* If we have an odd number of words to push, then decrement the stack
4855 by one word now, so the first stack argument will be dword aligned. */
4856 if (nstack & 4)
4857 sp -= 4;
4858
4859 while (si)
4860 {
4861 sp -= si->len;
4862 write_memory (sp, si->data, si->len);
4863 si = pop_stack_item (si);
4864 }
4865
4866 /* Finally, update the SP register. */
4867 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4868
4869 return sp;
4870 }
4871
4872
4873 /* Always align the frame to an 8-byte boundary. This is required on
4874 some platforms and harmless on the rest. */
4875
4876 static CORE_ADDR
4877 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4878 {
4879 /* Align the stack to eight bytes. */
4880 return sp & ~ (CORE_ADDR) 7;
4881 }
4882
4883 static void
4884 print_fpu_flags (struct ui_file *file, int flags)
4885 {
4886 if (flags & (1 << 0))
4887 gdb_puts ("IVO ", file);
4888 if (flags & (1 << 1))
4889 gdb_puts ("DVZ ", file);
4890 if (flags & (1 << 2))
4891 gdb_puts ("OFL ", file);
4892 if (flags & (1 << 3))
4893 gdb_puts ("UFL ", file);
4894 if (flags & (1 << 4))
4895 gdb_puts ("INX ", file);
4896 gdb_putc ('\n', file);
4897 }
4898
4899 /* Print interesting information about the floating point processor
4900 (if present) or emulator. */
4901 static void
4902 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4903 frame_info_ptr frame, const char *args)
4904 {
4905 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4906 int type;
4907
4908 type = (status >> 24) & 127;
4909 if (status & (1 << 31))
4910 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4911 else
4912 gdb_printf (file, _("Software FPU type %d\n"), type);
4913 /* i18n: [floating point unit] mask */
4914 gdb_puts (_("mask: "), file);
4915 print_fpu_flags (file, status >> 16);
4916 /* i18n: [floating point unit] flags */
4917 gdb_puts (_("flags: "), file);
4918 print_fpu_flags (file, status);
4919 }
4920
4921 /* Construct the ARM extended floating point type. */
4922 static struct type *
4923 arm_ext_type (struct gdbarch *gdbarch)
4924 {
4925 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4926
4927 if (!tdep->arm_ext_type)
4928 {
4929 type_allocator alloc (gdbarch);
4930 tdep->arm_ext_type
4931 = init_float_type (alloc, -1, "builtin_type_arm_ext",
4932 floatformats_arm_ext);
4933 }
4934
4935 return tdep->arm_ext_type;
4936 }
4937
4938 static struct type *
4939 arm_neon_double_type (struct gdbarch *gdbarch)
4940 {
4941 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4942
4943 if (tdep->neon_double_type == NULL)
4944 {
4945 struct type *t, *elem;
4946
4947 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4948 TYPE_CODE_UNION);
4949 elem = builtin_type (gdbarch)->builtin_uint8;
4950 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4951 elem = builtin_type (gdbarch)->builtin_uint16;
4952 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4953 elem = builtin_type (gdbarch)->builtin_uint32;
4954 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4955 elem = builtin_type (gdbarch)->builtin_uint64;
4956 append_composite_type_field (t, "u64", elem);
4957 elem = builtin_type (gdbarch)->builtin_float;
4958 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4959 elem = builtin_type (gdbarch)->builtin_double;
4960 append_composite_type_field (t, "f64", elem);
4961
4962 t->set_is_vector (true);
4963 t->set_name ("neon_d");
4964 tdep->neon_double_type = t;
4965 }
4966
4967 return tdep->neon_double_type;
4968 }
4969
4970 /* FIXME: The vector types are not correctly ordered on big-endian
4971 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4972 bits of d0 - regardless of what unit size is being held in d0. So
4973 the offset of the first uint8 in d0 is 7, but the offset of the
4974 first float is 4. This code works as-is for little-endian
4975 targets. */
4976
4977 static struct type *
4978 arm_neon_quad_type (struct gdbarch *gdbarch)
4979 {
4980 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4981
4982 if (tdep->neon_quad_type == NULL)
4983 {
4984 struct type *t, *elem;
4985
4986 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4987 TYPE_CODE_UNION);
4988 elem = builtin_type (gdbarch)->builtin_uint8;
4989 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4990 elem = builtin_type (gdbarch)->builtin_uint16;
4991 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4992 elem = builtin_type (gdbarch)->builtin_uint32;
4993 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4994 elem = builtin_type (gdbarch)->builtin_uint64;
4995 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4996 elem = builtin_type (gdbarch)->builtin_float;
4997 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4998 elem = builtin_type (gdbarch)->builtin_double;
4999 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
5000
5001 t->set_is_vector (true);
5002 t->set_name ("neon_q");
5003 tdep->neon_quad_type = t;
5004 }
5005
5006 return tdep->neon_quad_type;
5007 }
5008
5009 /* Return true if REGNUM is a Q pseudo register. Return false
5010 otherwise.
5011
5012 REGNUM is the raw register number and not a pseudo-relative register
5013 number. */
5014
5015 static bool
5016 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
5017 {
5018 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5019
5020 /* Q pseudo registers are available for both NEON (Q0~Q15) and
5021 MVE (Q0~Q7) features. */
5022 if (tdep->have_q_pseudos
5023 && regnum >= tdep->q_pseudo_base
5024 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
5025 return true;
5026
5027 return false;
5028 }
5029
5030 /* Return true if REGNUM is a VFP S pseudo register. Return false
5031 otherwise.
5032
5033 REGNUM is the raw register number and not a pseudo-relative register
5034 number. */
5035
5036 static bool
5037 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
5038 {
5039 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5040
5041 if (tdep->have_s_pseudos
5042 && regnum >= tdep->s_pseudo_base
5043 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
5044 return true;
5045
5046 return false;
5047 }
5048
5049 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
5050 otherwise.
5051
5052 REGNUM is the raw register number and not a pseudo-relative register
5053 number. */
5054
5055 static bool
5056 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
5057 {
5058 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5059
5060 if (tdep->have_mve
5061 && regnum >= tdep->mve_pseudo_base
5062 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
5063 return true;
5064
5065 return false;
5066 }
5067
5068 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
5069 false otherwise.
5070
5071 REGNUM is the raw register number and not a pseudo-relative register
5072 number. */
5073
5074 static bool
5075 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
5076 {
5077 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5078
5079 if (tdep->have_pacbti
5080 && regnum >= tdep->pacbti_pseudo_base
5081 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
5082 return true;
5083
5084 return false;
5085 }
5086
5087 /* Return the GDB type object for the "standard" data type of data in
5088 register N. */
5089
5090 static struct type *
5091 arm_register_type (struct gdbarch *gdbarch, int regnum)
5092 {
5093 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5094
5095 if (is_s_pseudo (gdbarch, regnum))
5096 return builtin_type (gdbarch)->builtin_float;
5097
5098 if (is_q_pseudo (gdbarch, regnum))
5099 return arm_neon_quad_type (gdbarch);
5100
5101 if (is_mve_pseudo (gdbarch, regnum))
5102 return builtin_type (gdbarch)->builtin_int16;
5103
5104 if (is_pacbti_pseudo (gdbarch, regnum))
5105 return builtin_type (gdbarch)->builtin_uint32;
5106
5107 /* If the target description has register information, we are only
5108 in this function so that we can override the types of
5109 double-precision registers for NEON. */
5110 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
5111 {
5112 struct type *t = tdesc_register_type (gdbarch, regnum);
5113
5114 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
5115 && t->code () == TYPE_CODE_FLT
5116 && tdep->have_neon)
5117 return arm_neon_double_type (gdbarch);
5118 else
5119 return t;
5120 }
5121
5122 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
5123 {
5124 if (!tdep->have_fpa_registers)
5125 return builtin_type (gdbarch)->builtin_void;
5126
5127 return arm_ext_type (gdbarch);
5128 }
5129 else if (regnum == ARM_SP_REGNUM)
5130 return builtin_type (gdbarch)->builtin_data_ptr;
5131 else if (regnum == ARM_PC_REGNUM)
5132 return builtin_type (gdbarch)->builtin_func_ptr;
5133 else if (regnum >= ARRAY_SIZE (arm_register_names))
5134 /* These registers are only supported on targets which supply
5135 an XML description. */
5136 return builtin_type (gdbarch)->builtin_int0;
5137 else
5138 return builtin_type (gdbarch)->builtin_uint32;
5139 }
5140
5141 /* Map a DWARF register REGNUM onto the appropriate GDB register
5142 number. */
5143
5144 static int
5145 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
5146 {
5147 /* Core integer regs. */
5148 if (reg >= 0 && reg <= 15)
5149 return reg;
5150
5151 /* Legacy FPA encoding. These were once used in a way which
5152 overlapped with VFP register numbering, so their use is
5153 discouraged, but GDB doesn't support the ARM toolchain
5154 which used them for VFP. */
5155 if (reg >= 16 && reg <= 23)
5156 return ARM_F0_REGNUM + reg - 16;
5157
5158 /* New assignments for the FPA registers. */
5159 if (reg >= 96 && reg <= 103)
5160 return ARM_F0_REGNUM + reg - 96;
5161
5162 /* WMMX register assignments. */
5163 if (reg >= 104 && reg <= 111)
5164 return ARM_WCGR0_REGNUM + reg - 104;
5165
5166 if (reg >= 112 && reg <= 127)
5167 return ARM_WR0_REGNUM + reg - 112;
5168
5169 /* PACBTI register containing the Pointer Authentication Code. */
5170 if (reg == ARM_DWARF_RA_AUTH_CODE)
5171 {
5172 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5173
5174 if (tdep->have_pacbti)
5175 return tdep->pacbti_pseudo_base;
5176
5177 return -1;
5178 }
5179
5180 if (reg >= 192 && reg <= 199)
5181 return ARM_WC0_REGNUM + reg - 192;
5182
5183 /* VFP v2 registers. A double precision value is actually
5184 in d1 rather than s2, but the ABI only defines numbering
5185 for the single precision registers. This will "just work"
5186 in GDB for little endian targets (we'll read eight bytes,
5187 starting in s0 and then progressing to s1), but will be
5188 reversed on big endian targets with VFP. This won't
5189 be a problem for the new Neon quad registers; you're supposed
5190 to use DW_OP_piece for those. */
5191 if (reg >= 64 && reg <= 95)
5192 {
5193 char name_buf[4];
5194
5195 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
5196 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5197 strlen (name_buf));
5198 }
5199
5200 /* VFP v3 / Neon registers. This range is also used for VFP v2
5201 registers, except that it now describes d0 instead of s0. */
5202 if (reg >= 256 && reg <= 287)
5203 {
5204 char name_buf[4];
5205
5206 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
5207 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5208 strlen (name_buf));
5209 }
5210
5211 return -1;
5212 }
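/* For example, with the mapping above DWARF register 66 resolves to the
   user register "s2", 257 to "d1", and 98 to the FPA register f2. */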
5213
5214 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
5215 static int
5216 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
5217 {
5218 int reg = regnum;
5219 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
5220
5221 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
5222 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
5223
5224 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
5225 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
5226
5227 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
5228 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
5229
5230 if (reg < NUM_GREGS)
5231 return SIM_ARM_R0_REGNUM + reg;
5232 reg -= NUM_GREGS;
5233
5234 if (reg < NUM_FREGS)
5235 return SIM_ARM_FP0_REGNUM + reg;
5236 reg -= NUM_FREGS;
5237
5238 if (reg < NUM_SREGS)
5239 return SIM_ARM_FPS_REGNUM + reg;
5240 reg -= NUM_SREGS;
5241
5242 internal_error (_("Bad REGNUM %d"), regnum);
5243 }
5244
5245 static const unsigned char op_lit0 = DW_OP_lit0;
5246
5247 static void
5248 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
5249 struct dwarf2_frame_state_reg *reg,
5250 frame_info_ptr this_frame)
5251 {
5252 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5253
5254 if (is_pacbti_pseudo (gdbarch, regnum))
5255 {
5256 /* Initialize RA_AUTH_CODE to zero. */
5257 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
5258 reg->loc.exp.start = &op_lit0;
5259 reg->loc.exp.len = 1;
5260 return;
5261 }
5262
5263 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM)
5264 {
5265 reg->how = DWARF2_FRAME_REG_FN;
5266 reg->loc.fn = arm_dwarf2_prev_register;
5267 }
5268 else if (regnum == ARM_SP_REGNUM)
5269 reg->how = DWARF2_FRAME_REG_CFA;
5270 else if (arm_is_alternative_sp_register (tdep, regnum))
5271 {
5272 /* Handle the alternative SP registers on Cortex-M. */
5273 reg->how = DWARF2_FRAME_REG_FN;
5274 reg->loc.fn = arm_dwarf2_prev_register;
5275 }
5276 }
5277
5278 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5279 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5280 NULL if an error occurs. BUF is freed. */
5281
5282 static gdb_byte *
5283 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5284 int old_len, int new_len)
5285 {
5286 gdb_byte *new_buf;
5287 int bytes_to_read = new_len - old_len;
5288
5289 new_buf = (gdb_byte *) xmalloc (new_len);
5290 memcpy (new_buf + bytes_to_read, buf, old_len);
5291 xfree (buf);
5292 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
5293 {
5294 xfree (new_buf);
5295 return NULL;
5296 }
5297 return new_buf;
5298 }
5299
5300 /* An IT block is at most the 2-byte IT instruction followed by
5301 four 4-byte instructions. The furthest back we must search to
5302 find an IT block that affects the current instruction is thus
5303 2 + 3 * 4 == 14 bytes. */
5304 #define MAX_IT_BLOCK_PREFIX 14
5305
5306 /* Use a quick scan if there are more than this many bytes of
5307 code. */
5308 #define IT_SCAN_THRESHOLD 32
5309
5310 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5311 A breakpoint in an IT block may not be hit, depending on the
5312 condition flags. */
5313 static CORE_ADDR
5314 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5315 {
5316 gdb_byte *buf;
5317 char map_type;
5318 CORE_ADDR boundary, func_start;
5319 int buf_len;
5320 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5321 int i, any, last_it, last_it_count;
5322 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5323
5324 /* If we are using BKPT breakpoints, none of this is necessary. */
5325 if (tdep->thumb2_breakpoint == NULL)
5326 return bpaddr;
5327
5328 /* ARM mode does not have this problem. */
5329 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5330 return bpaddr;
5331
5332 /* We are setting a breakpoint in Thumb code that could potentially
5333 contain an IT block. The first step is to find how much Thumb
5334 code there is; we do not need to read outside of known Thumb
5335 sequences. */
5336 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5337 if (map_type == 0)
5338 /* Thumb-2 code must have mapping symbols to have a chance. */
5339 return bpaddr;
5340
5341 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5342
5343 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5344 && func_start > boundary)
5345 boundary = func_start;
5346
5347 /* Search for a candidate IT instruction. We have to do some fancy
5348 footwork to distinguish a real IT instruction from the second
5349 half of a 32-bit instruction, but there is no need for that if
5350 there's no candidate. */
5351 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5352 if (buf_len == 0)
5353 /* No room for an IT instruction. */
5354 return bpaddr;
5355
5356 buf = (gdb_byte *) xmalloc (buf_len);
5357 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5358 return bpaddr;
5359 any = 0;
5360 for (i = 0; i < buf_len; i += 2)
5361 {
5362 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5363 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5364 {
5365 any = 1;
5366 break;
5367 }
5368 }
5369
5370 if (any == 0)
5371 {
5372 xfree (buf);
5373 return bpaddr;
5374 }
5375
5376 /* OK, the code bytes before this instruction contain at least one
5377 halfword which resembles an IT instruction. We know that it's
5378 Thumb code, but there are still two possibilities. Either the
5379 halfword really is an IT instruction, or it is the second half of
5380 a 32-bit Thumb instruction. The only way we can tell is to
5381 scan forwards from a known instruction boundary. */
5382 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5383 {
5384 int definite;
5385
5386 /* There's a lot of code before this instruction. Start with an
5387 optimistic search; it's easy to recognize halfwords that can
5388 not be the start of a 32-bit instruction, and use that to
5389 lock on to the instruction boundaries. */
5390 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5391 if (buf == NULL)
5392 return bpaddr;
5393 buf_len = IT_SCAN_THRESHOLD;
5394
5395 definite = 0;
5396 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5397 {
5398 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5399 if (thumb_insn_size (inst1) == 2)
5400 {
5401 definite = 1;
5402 break;
5403 }
5404 }
5405
5406 /* At this point, if DEFINITE, BUF[I] is the first place we
5407 are sure that we know the instruction boundaries, and it is far
5408 enough from BPADDR that we could not miss an IT instruction
5409 affecting BPADDR. If ! DEFINITE, give up - start from a
5410 known boundary. */
5411 if (! definite)
5412 {
5413 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5414 bpaddr - boundary);
5415 if (buf == NULL)
5416 return bpaddr;
5417 buf_len = bpaddr - boundary;
5418 i = 0;
5419 }
5420 }
5421 else
5422 {
5423 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5424 if (buf == NULL)
5425 return bpaddr;
5426 buf_len = bpaddr - boundary;
5427 i = 0;
5428 }
5429
5430 /* Scan forwards. Find the last IT instruction before BPADDR. */
5431 last_it = -1;
5432 last_it_count = 0;
5433 while (i < buf_len)
5434 {
5435 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5436 last_it_count--;
5437 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5438 {
5439 last_it = i;
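/* The low four bits of an IT instruction hold its mask; the position of
   the least significant set bit determines how many instructions (1-4)
   the block covers.  */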
5440 if (inst1 & 0x0001)
5441 last_it_count = 4;
5442 else if (inst1 & 0x0002)
5443 last_it_count = 3;
5444 else if (inst1 & 0x0004)
5445 last_it_count = 2;
5446 else
5447 last_it_count = 1;
5448 }
5449 i += thumb_insn_size (inst1);
5450 }
5451
5452 xfree (buf);
5453
5454 if (last_it == -1)
5455 /* There wasn't really an IT instruction after all. */
5456 return bpaddr;
5457
5458 if (last_it_count < 1)
5459 /* It was too far away. */
5460 return bpaddr;
5461
5462 /* This really is a trouble spot. Move the breakpoint to the IT
5463 instruction. */
5464 return bpaddr - buf_len + last_it;
5465 }
5466
5467 /* ARM displaced stepping support.
5468
5469 Generally ARM displaced stepping works as follows:
5470
5471 1. When an instruction is to be single-stepped, it is first decoded by
5472 arm_process_displaced_insn. Depending on the type of instruction, it is
5473 then copied to a scratch location, possibly in a modified form. The
5474 copy_* set of functions performs such modification, as necessary. A
5475 breakpoint is placed after the modified instruction in the scratch space
5476 to return control to GDB. Note in particular that instructions which
5477 modify the PC will no longer do so after modification.
5478
5479 2. The instruction is single-stepped, by setting the PC to the scratch
5480 location address, and resuming. Control returns to GDB when the
5481 breakpoint is hit.
5482
5483 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5484 function used for the current instruction. This function's job is to
5485 put the CPU/memory state back to what it would have been if the
5486 instruction had been executed unmodified in its original location. */
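
/* For illustration, based on the helpers below: a conditional branch such
   as "bne <label>" is handled by arm_copy_b_bl_blx, which records the
   condition and destination and replaces the instruction with a NOP in the
   scratch space.  The NOP is single-stepped there, and cleanup_branch then
   tests the condition against the current CPSR and, if it holds, writes the
   recorded destination into the PC (and the return address into LR for a
   BL/BLX).  */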
5487
5488 /* NOP instruction (mov r0, r0). */
5489 #define ARM_NOP 0xe1a00000
5490 #define THUMB_NOP 0x4600
5491
5492 /* Helper for register reads for displaced stepping. In particular, this
5493 returns the PC as it would be seen by the instruction at its original
5494 location. */
5495
5496 ULONGEST
5497 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5498 int regno)
5499 {
5500 ULONGEST ret;
5501 CORE_ADDR from = dsc->insn_addr;
5502
5503 if (regno == ARM_PC_REGNUM)
5504 {
5505 /* Compute pipeline offset:
5506 - When executing an ARM instruction, PC reads as the address of the
5507 current instruction plus 8.
5508 - When executing a Thumb instruction, PC reads as the address of the
5509 current instruction plus 4. */
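      /* For example, for an ARM instruction originally at 0x8000 the value
	 read here is 0x8008; for a Thumb instruction at 0x8000 it is
	 0x8004.  */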
5510
5511 if (!dsc->is_thumb)
5512 from += 8;
5513 else
5514 from += 4;
5515
5516 displaced_debug_printf ("read pc value %.8lx",
5517 (unsigned long) from);
5518 return (ULONGEST) from;
5519 }
5520 else
5521 {
5522 regcache_cooked_read_unsigned (regs, regno, &ret);
5523
5524 displaced_debug_printf ("read r%d value %.8lx",
5525 regno, (unsigned long) ret);
5526
5527 return ret;
5528 }
5529 }
5530
5531 static int
5532 displaced_in_arm_mode (struct regcache *regs)
5533 {
5534 ULONGEST ps;
5535 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5536
5537 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5538
5539 return (ps & t_bit) == 0;
5540 }
5541
5542 /* Write to the PC as from a branch instruction. */
5543
5544 static void
5545 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5546 ULONGEST val)
5547 {
5548 if (!dsc->is_thumb)
5549 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5550 architecture versions < 6. */
5551 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5552 val & ~(ULONGEST) 0x3);
5553 else
5554 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5555 val & ~(ULONGEST) 0x1);
5556 }
5557
5558 /* Write to the PC as from a branch-exchange instruction. */
5559
5560 static void
5561 bx_write_pc (struct regcache *regs, ULONGEST val)
5562 {
5563 ULONGEST ps;
5564 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5565
5566 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5567
5568 if ((val & 1) == 1)
5569 {
5570 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5571 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5572 }
5573 else if ((val & 2) == 0)
5574 {
5575 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5576 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5577 }
5578 else
5579 {
5580 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5581 mode, align dest to 4 bytes). */
5582 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5583 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5584 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5585 }
5586 }
5587
5588 /* Write to the PC as if from a load instruction. */
5589
5590 static void
5591 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5592 ULONGEST val)
5593 {
5594 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5595 bx_write_pc (regs, val);
5596 else
5597 branch_write_pc (regs, dsc, val);
5598 }
5599
5600 /* Write to the PC as if from an ALU instruction. */
5601
5602 static void
5603 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5604 ULONGEST val)
5605 {
5606 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5607 bx_write_pc (regs, val);
5608 else
5609 branch_write_pc (regs, dsc, val);
5610 }
5611
5612 /* Helper for writing to registers for displaced stepping. Writing to the PC
5613 has varying effects depending on the instruction which does the write:
5614 this is controlled by the WRITE_PC argument. */
5615
5616 void
5617 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5618 int regno, ULONGEST val, enum pc_write_style write_pc)
5619 {
5620 if (regno == ARM_PC_REGNUM)
5621 {
5622 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5623
5624 switch (write_pc)
5625 {
5626 case BRANCH_WRITE_PC:
5627 branch_write_pc (regs, dsc, val);
5628 break;
5629
5630 case BX_WRITE_PC:
5631 bx_write_pc (regs, val);
5632 break;
5633
5634 case LOAD_WRITE_PC:
5635 load_write_pc (regs, dsc, val);
5636 break;
5637
5638 case ALU_WRITE_PC:
5639 alu_write_pc (regs, dsc, val);
5640 break;
5641
5642 case CANNOT_WRITE_PC:
5643 warning (_("Instruction wrote to PC in an unexpected way when "
5644 "single-stepping"));
5645 break;
5646
5647 default:
5648 internal_error (_("Invalid argument to displaced_write_reg"));
5649 }
5650
5651 dsc->wrote_to_pc = 1;
5652 }
5653 else
5654 {
5655 displaced_debug_printf ("writing r%d value %.8lx",
5656 regno, (unsigned long) val);
5657 regcache_cooked_write_unsigned (regs, regno, val);
5658 }
5659 }
5660
5661 /* This function is used to concisely determine if an instruction INSN
5662 references PC. Register fields of interest in INSN should have the
5663 corresponding fields of BITMASK set to 0b1111. The function
5664 returns 1 if any of these fields in INSN reference the PC
5665 (also 0b1111, r15), else it returns 0. */
5666
5667 static int
5668 insn_references_pc (uint32_t insn, uint32_t bitmask)
5669 {
5670 uint32_t lowbit = 1;
5671
5672 while (bitmask != 0)
5673 {
5674 uint32_t mask;
5675
5676 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5677 ;
5678
5679 if (!lowbit)
5680 break;
5681
5682 mask = lowbit * 0xf;
5683
5684 if ((insn & mask) == mask)
5685 return 1;
5686
5687 bitmask &= ~mask;
5688 }
5689
5690 return 0;
5691 }
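
/* For example, with BITMASK 0x0000f000 (the Rd field of an ARM
   data-processing instruction), insn_references_pc returns 1 for
   "mov pc, r1" (0xe1a0f001) and 0 for "mov r2, r1" (0xe1a02001).  */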
5692
5693 /* The simplest copy function. Many instructions have the same effect no
5694 matter what address they are executed at: in those cases, use this. */
5695
5696 static int
5697 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5698 arm_displaced_step_copy_insn_closure *dsc)
5699 {
5700 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5701 (unsigned long) insn, iname);
5702
5703 dsc->modinsn[0] = insn;
5704
5705 return 0;
5706 }
5707
5708 static int
5709 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5710 uint16_t insn2, const char *iname,
5711 arm_displaced_step_copy_insn_closure *dsc)
5712 {
5713 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5714 "unmodified", insn1, insn2, iname);
5715
5716 dsc->modinsn[0] = insn1;
5717 dsc->modinsn[1] = insn2;
5718 dsc->numinsns = 2;
5719
5720 return 0;
5721 }
5722
5723 /* Copy a 16-bit Thumb instruction (plain Thumb or 16-bit Thumb-2) without
5724 any modification. */
5725 static int
5726 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5727 const char *iname,
5728 arm_displaced_step_copy_insn_closure *dsc)
5729 {
5730 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5731 insn, iname);
5732
5733 dsc->modinsn[0] = insn;
5734
5735 return 0;
5736 }
5737
5738 /* Preload instructions with immediate offset. */
5739
5740 static void
5741 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5742 arm_displaced_step_copy_insn_closure *dsc)
5743 {
5744 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5745 if (!dsc->u.preload.immed)
5746 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5747 }
5748
5749 static void
5750 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5751 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5752 {
5753 ULONGEST rn_val;
5754 /* Preload instructions:
5755
5756 {pli/pld} [rn, #+/-imm]
5757 ->
5758 {pli/pld} [r0, #+/-imm]. */
5759
5760 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5761 rn_val = displaced_read_reg (regs, dsc, rn);
5762 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5763 dsc->u.preload.immed = 1;
5764
5765 dsc->cleanup = &cleanup_preload;
5766 }
5767
5768 static int
5769 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5770 arm_displaced_step_copy_insn_closure *dsc)
5771 {
5772 unsigned int rn = bits (insn, 16, 19);
5773
5774 if (!insn_references_pc (insn, 0x000f0000ul))
5775 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5776
5777 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5778
5779 dsc->modinsn[0] = insn & 0xfff0ffff;
5780
5781 install_preload (gdbarch, regs, dsc, rn);
5782
5783 return 0;
5784 }
5785
5786 static int
5787 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5788 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5789 {
5790 unsigned int rn = bits (insn1, 0, 3);
5791 unsigned int u_bit = bit (insn1, 7);
5792 int imm12 = bits (insn2, 0, 11);
5793 ULONGEST pc_val;
5794
5795 if (rn != ARM_PC_REGNUM)
5796 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5797
5798 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3
5799 and PLD (literal) Encoding T1. */
5800 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5801 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5802 imm12);
5803
5804 if (!u_bit)
5805 imm12 = -1 * imm12;
5806
5807 /* Rewrite instruction {pli/pld} PC imm12 into:
5808 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5809
5810 {pli/pld} [r0, r1]
5811
5812 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5813
5814 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5815 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5816
5817 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5818
5819 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5820 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5821 dsc->u.preload.immed = 0;
5822
5823 /* {pli/pld} [r0, r1] */
5824 dsc->modinsn[0] = insn1 & 0xfff0;
5825 dsc->modinsn[1] = 0xf001;
5826 dsc->numinsns = 2;
5827
5828 dsc->cleanup = &cleanup_preload;
5829 return 0;
5830 }
5831
5832 /* Preload instructions with register offset. */
5833
5834 static void
5835 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5836 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5837 unsigned int rm)
5838 {
5839 ULONGEST rn_val, rm_val;
5840
5841 /* Preload register-offset instructions:
5842
5843 {pli/pld} [rn, rm {, shift}]
5844 ->
5845 {pli/pld} [r0, r1 {, shift}]. */
5846
5847 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5848 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5849 rn_val = displaced_read_reg (regs, dsc, rn);
5850 rm_val = displaced_read_reg (regs, dsc, rm);
5851 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5852 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5853 dsc->u.preload.immed = 0;
5854
5855 dsc->cleanup = &cleanup_preload;
5856 }
5857
5858 static int
5859 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5860 struct regcache *regs,
5861 arm_displaced_step_copy_insn_closure *dsc)
5862 {
5863 unsigned int rn = bits (insn, 16, 19);
5864 unsigned int rm = bits (insn, 0, 3);
5865
5866
5867 if (!insn_references_pc (insn, 0x000f000ful))
5868 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5869
5870 displaced_debug_printf ("copying preload insn %.8lx",
5871 (unsigned long) insn);
5872
5873 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5874
5875 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5876 return 0;
5877 }
5878
5879 /* Copy/cleanup coprocessor load and store instructions. */
5880
5881 static void
5882 cleanup_copro_load_store (struct gdbarch *gdbarch,
5883 struct regcache *regs,
5884 arm_displaced_step_copy_insn_closure *dsc)
5885 {
5886 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5887
5888 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5889
5890 if (dsc->u.ldst.writeback)
5891 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5892 }
5893
5894 static void
5895 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5896 arm_displaced_step_copy_insn_closure *dsc,
5897 int writeback, unsigned int rn)
5898 {
5899 ULONGEST rn_val;
5900
5901 /* Coprocessor load/store instructions:
5902
5903 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5904 ->
5905 {stc/stc2} [r0, #+/-imm].
5906
5907 ldc/ldc2 are handled identically. */
5908
5909 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5910 rn_val = displaced_read_reg (regs, dsc, rn);
5911 /* PC should be 4-byte aligned. */
5912 rn_val = rn_val & 0xfffffffc;
5913 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5914
5915 dsc->u.ldst.writeback = writeback;
5916 dsc->u.ldst.rn = rn;
5917
5918 dsc->cleanup = &cleanup_copro_load_store;
5919 }
5920
5921 static int
5922 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5923 struct regcache *regs,
5924 arm_displaced_step_copy_insn_closure *dsc)
5925 {
5926 unsigned int rn = bits (insn, 16, 19);
5927
5928 if (!insn_references_pc (insn, 0x000f0000ul))
5929 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5930
5931 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5932 (unsigned long) insn);
5933
5934 dsc->modinsn[0] = insn & 0xfff0ffff;
5935
5936 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5937
5938 return 0;
5939 }
5940
5941 static int
5942 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5943 uint16_t insn2, struct regcache *regs,
5944 arm_displaced_step_copy_insn_closure *dsc)
5945 {
5946 unsigned int rn = bits (insn1, 0, 3);
5947
5948 if (rn != ARM_PC_REGNUM)
5949 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5950 "copro load/store", dsc);
5951
5952 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5953 insn1, insn2);
5954
5955 dsc->modinsn[0] = insn1 & 0xfff0;
5956 dsc->modinsn[1] = insn2;
5957 dsc->numinsns = 2;
5958
5959 /* This function is called for copying the LDC/LDC2/VLDR instructions,
5960 which don't support writeback in this form, so pass 0. */
5961 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5962
5963 return 0;
5964 }
5965
5966 /* Clean up branch instructions (actually perform the branch, by setting
5967 PC). */
5968
5969 static void
5970 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5971 arm_displaced_step_copy_insn_closure *dsc)
5972 {
5973 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5974 int branch_taken = condition_true (dsc->u.branch.cond, status);
5975 enum pc_write_style write_pc = dsc->u.branch.exchange
5976 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5977
5978 if (!branch_taken)
5979 return;
5980
5981 if (dsc->u.branch.link)
5982 {
5983 /* The value of LR should be the address of the next instruction. To
5984 avoid confusing later handling of a `bx lr' insn, if the current insn
5985 is Thumb, bit 0 of the LR value should be set to 1. */
5986 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5987
5988 if (dsc->is_thumb)
5989 next_insn_addr |= 0x1;
5990
5991 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5992 CANNOT_WRITE_PC);
5993 }
5994
5995 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5996 }
5997
5998 /* Copy B/BL/BLX instructions with immediate destinations. */
5999
6000 static void
6001 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6002 arm_displaced_step_copy_insn_closure *dsc,
6003 unsigned int cond, int exchange, int link, long offset)
6004 {
6005 /* Implement "BL<cond> <label>" as:
6006
6007 Preparation: cond <- instruction condition
6008 Insn: mov r0, r0 (nop)
6009 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6010
6011 B<cond> similar, but don't set r14 in cleanup. */
6012
6013 dsc->u.branch.cond = cond;
6014 dsc->u.branch.link = link;
6015 dsc->u.branch.exchange = exchange;
6016
6017 dsc->u.branch.dest = dsc->insn_addr;
6018 if (link && exchange)
6019 /* For BLX, the offset is computed from Align (PC, 4). */
6020 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6021
6022 if (dsc->is_thumb)
6023 dsc->u.branch.dest += 4 + offset;
6024 else
6025 dsc->u.branch.dest += 8 + offset;
6026
6027 dsc->cleanup = &cleanup_branch;
6028 }
6029 static int
6030 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6031 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6032 {
6033 unsigned int cond = bits (insn, 28, 31);
6034 int exchange = (cond == 0xf);
6035 int link = exchange || bit (insn, 24);
6036 long offset;
6037
6038 displaced_debug_printf ("copying %s immediate insn %.8lx",
6039 (exchange) ? "blx" : (link) ? "bl" : "b",
6040 (unsigned long) insn);
6041 if (exchange)
6042 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6043 then arrange the switch into Thumb mode. */
6044 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6045 else
6046 offset = bits (insn, 0, 23) << 2;
6047
6048 if (bit (offset, 25))
6049 offset = offset | ~0x3ffffff;
6050
6051 dsc->modinsn[0] = ARM_NOP;
6052
6053 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6054 return 0;
6055 }
6056
6057 static int
6058 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6059 uint16_t insn2, struct regcache *regs,
6060 arm_displaced_step_copy_insn_closure *dsc)
6061 {
6062 int link = bit (insn2, 14);
6063 int exchange = link && !bit (insn2, 12);
6064 int cond = INST_AL;
6065 long offset = 0;
6066 int j1 = bit (insn2, 13);
6067 int j2 = bit (insn2, 11);
6068 int s = sbits (insn1, 10, 10);
6069 int i1 = !(j1 ^ bit (insn1, 10));
6070 int i2 = !(j2 ^ bit (insn1, 10));
6071
6072 if (!link && !exchange) /* B */
6073 {
6074 offset = (bits (insn2, 0, 10) << 1);
6075 if (bit (insn2, 12)) /* Encoding T4 */
6076 {
6077 offset |= (bits (insn1, 0, 9) << 12)
6078 | (i2 << 22)
6079 | (i1 << 23)
6080 | (s << 24);
6081 cond = INST_AL;
6082 }
6083 else /* Encoding T3 */
6084 {
6085 offset |= (bits (insn1, 0, 5) << 12)
6086 | (j1 << 18)
6087 | (j2 << 19)
6088 | (s << 20);
6089 cond = bits (insn1, 6, 9);
6090 }
6091 }
6092 else
6093 {
6094 offset = (bits (insn1, 0, 9) << 12);
6095 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6096 offset |= exchange ?
6097 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6098 }
6099
6100 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
6101 link ? (exchange) ? "blx" : "bl" : "b",
6102 insn1, insn2, offset);
6103
6104 dsc->modinsn[0] = THUMB_NOP;
6105
6106 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6107 return 0;
6108 }
6109
6110 /* Copy B Thumb instructions. */
6111 static int
6112 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
6113 arm_displaced_step_copy_insn_closure *dsc)
6114 {
6115 unsigned int cond = 0;
6116 int offset = 0;
6117 unsigned short bit_12_15 = bits (insn, 12, 15);
6118 CORE_ADDR from = dsc->insn_addr;
6119
6120 if (bit_12_15 == 0xd)
6121 {
6122 /* offset = SignExtend (imm8:0, 32) */
6123 offset = sbits ((insn << 1), 0, 8);
6124 cond = bits (insn, 8, 11);
6125 }
6126 else if (bit_12_15 == 0xe) /* Encoding T2 */
6127 {
6128 offset = sbits ((insn << 1), 0, 11);
6129 cond = INST_AL;
6130 }
6131
6132 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
6133 insn, offset);
6134
6135 dsc->u.branch.cond = cond;
6136 dsc->u.branch.link = 0;
6137 dsc->u.branch.exchange = 0;
6138 dsc->u.branch.dest = from + 4 + offset;
6139
6140 dsc->modinsn[0] = THUMB_NOP;
6141
6142 dsc->cleanup = &cleanup_branch;
6143
6144 return 0;
6145 }
6146
6147 /* Copy BX/BLX with register-specified destinations. */
6148
6149 static void
6150 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6151 arm_displaced_step_copy_insn_closure *dsc, int link,
6152 unsigned int cond, unsigned int rm)
6153 {
6154 /* Implement "{BX,BLX}<cond> <reg>" as:
6155
6156 Preparation: cond <- instruction condition
6157 Insn: mov r0, r0 (nop)
6158 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6159
6160 Don't set r14 in cleanup for BX. */
6161
6162 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6163
6164 dsc->u.branch.cond = cond;
6165 dsc->u.branch.link = link;
6166
6167 dsc->u.branch.exchange = 1;
6168
6169 dsc->cleanup = &cleanup_branch;
6170 }
6171
6172 static int
6173 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6174 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6175 {
6176 unsigned int cond = bits (insn, 28, 31);
6177 /* BX: x12xxx1x
6178 BLX: x12xxx3x. */
6179 int link = bit (insn, 5);
6180 unsigned int rm = bits (insn, 0, 3);
6181
6182 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
6183
6184 dsc->modinsn[0] = ARM_NOP;
6185
6186 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6187 return 0;
6188 }
6189
6190 static int
6191 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6192 struct regcache *regs,
6193 arm_displaced_step_copy_insn_closure *dsc)
6194 {
6195 int link = bit (insn, 7);
6196 unsigned int rm = bits (insn, 3, 6);
6197
6198 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
6199
6200 dsc->modinsn[0] = THUMB_NOP;
6201
6202 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6203
6204 return 0;
6205 }
6206
6207
6208 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6209
6210 static void
6211 cleanup_alu_imm (struct gdbarch *gdbarch,
6212 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6213 {
6214 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6215 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6216 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6217 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6218 }
6219
6220 static int
6221 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6222 arm_displaced_step_copy_insn_closure *dsc)
6223 {
6224 unsigned int rn = bits (insn, 16, 19);
6225 unsigned int rd = bits (insn, 12, 15);
6226 unsigned int op = bits (insn, 21, 24);
6227 int is_mov = (op == 0xd);
6228 ULONGEST rd_val, rn_val;
6229
6230 if (!insn_references_pc (insn, 0x000ff000ul))
6231 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6232
6233 displaced_debug_printf ("copying immediate %s insn %.8lx",
6234 is_mov ? "move" : "ALU",
6235 (unsigned long) insn);
6236
6237 /* Instruction is of form:
6238
6239 <op><cond> rd, [rn,] #imm
6240
6241 Rewrite as:
6242
6243 Preparation: tmp1, tmp2 <- r0, r1;
6244 r0, r1 <- rd, rn
6245 Insn: <op><cond> r0, r1, #imm
6246 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6247 */
6248
6249 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6250 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6251 rn_val = displaced_read_reg (regs, dsc, rn);
6252 rd_val = displaced_read_reg (regs, dsc, rd);
6253 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6254 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6255 dsc->rd = rd;
6256
6257 if (is_mov)
6258 dsc->modinsn[0] = insn & 0xfff00fff;
6259 else
6260 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6261
6262 dsc->cleanup = &cleanup_alu_imm;
6263
6264 return 0;
6265 }
6266
6267 static int
6268 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6269 uint16_t insn2, struct regcache *regs,
6270 arm_displaced_step_copy_insn_closure *dsc)
6271 {
6272 unsigned int op = bits (insn1, 5, 8);
6273 unsigned int rn, rm, rd;
6274 ULONGEST rd_val, rn_val;
6275
6276 rn = bits (insn1, 0, 3); /* Rn */
6277 rm = bits (insn2, 0, 3); /* Rm */
6278 rd = bits (insn2, 8, 11); /* Rd */
6279
6280 /* This routine is only called for the MOV instruction. */
6281 gdb_assert (op == 0x2 && rn == 0xf);
6282
6283 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6284 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6285
6286 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
6287
6288 /* Instruction is of form:
6289
6290 <op><cond> rd, [rn,] #imm
6291
6292 Rewrite as:
6293
6294 Preparation: tmp1, tmp2 <- r0, r1;
6295 r0, r1 <- rd, rn
6296 Insn: <op><cond> r0, r1, #imm
6297 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6298 */
6299
6300 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6301 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6302 rn_val = displaced_read_reg (regs, dsc, rn);
6303 rd_val = displaced_read_reg (regs, dsc, rd);
6304 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6305 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6306 dsc->rd = rd;
6307
6308 dsc->modinsn[0] = insn1;
6309 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6310 dsc->numinsns = 2;
6311
6312 dsc->cleanup = &cleanup_alu_imm;
6313
6314 return 0;
6315 }
6316
6317 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6318
6319 static void
6320 cleanup_alu_reg (struct gdbarch *gdbarch,
6321 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6322 {
6323 ULONGEST rd_val;
6324 int i;
6325
6326 rd_val = displaced_read_reg (regs, dsc, 0);
6327
6328 for (i = 0; i < 3; i++)
6329 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6330
6331 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6332 }
6333
6334 static void
6335 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6336 arm_displaced_step_copy_insn_closure *dsc,
6337 unsigned int rd, unsigned int rn, unsigned int rm)
6338 {
6339 ULONGEST rd_val, rn_val, rm_val;
6340
6341 /* Instruction is of form:
6342
6343 <op><cond> rd, [rn,] rm [, <shift>]
6344
6345 Rewrite as:
6346
6347 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6348 r0, r1, r2 <- rd, rn, rm
6349 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6350 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6351 */
6352
6353 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6354 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6355 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6356 rd_val = displaced_read_reg (regs, dsc, rd);
6357 rn_val = displaced_read_reg (regs, dsc, rn);
6358 rm_val = displaced_read_reg (regs, dsc, rm);
6359 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6360 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6361 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6362 dsc->rd = rd;
6363
6364 dsc->cleanup = &cleanup_alu_reg;
6365 }
6366
6367 static int
6368 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6369 arm_displaced_step_copy_insn_closure *dsc)
6370 {
6371 unsigned int op = bits (insn, 21, 24);
6372 int is_mov = (op == 0xd);
6373
6374 if (!insn_references_pc (insn, 0x000ff00ful))
6375 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6376
6377 displaced_debug_printf ("copying reg %s insn %.8lx",
6378 is_mov ? "move" : "ALU", (unsigned long) insn);
6379
6380 if (is_mov)
6381 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6382 else
6383 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6384
6385 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6386 bits (insn, 0, 3));
6387 return 0;
6388 }
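
/* A worked example of the scheme above, for illustration: displaced-stepping
   "mov r2, pc" (0xe1a0200f) at address FROM.  arm_copy_alu_reg rewrites it
   to "mov r0, r2", and install_alu_reg preloads r2 with the PC value the
   instruction would have seen at FROM, i.e. FROM + 8.  After the scratch-pad
   step, cleanup_alu_reg restores r0-r2 and writes the result (FROM + 8) into
   r2, exactly as the original instruction would have done in place.  */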
6389
6390 static int
6391 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6392 struct regcache *regs,
6393 arm_displaced_step_copy_insn_closure *dsc)
6394 {
6395 unsigned rm, rd;
6396
6397 rm = bits (insn, 3, 6);
6398 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6399
6400 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6401 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6402
6403 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6404
6405 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6406
6407 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6408
6409 return 0;
6410 }
6411
6412 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6413
6414 static void
6415 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6416 struct regcache *regs,
6417 arm_displaced_step_copy_insn_closure *dsc)
6418 {
6419 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6420 int i;
6421
6422 for (i = 0; i < 4; i++)
6423 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6424
6425 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6426 }
6427
6428 static void
6429 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6430 arm_displaced_step_copy_insn_closure *dsc,
6431 unsigned int rd, unsigned int rn, unsigned int rm,
6432 unsigned rs)
6433 {
6434 int i;
6435 ULONGEST rd_val, rn_val, rm_val, rs_val;
6436
6437 /* Instruction is of form:
6438
6439 <op><cond> rd, [rn,] rm, <shift> rs
6440
6441 Rewrite as:
6442
6443 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6444 r0, r1, r2, r3 <- rd, rn, rm, rs
6445 Insn: <op><cond> r0, r1, r2, <shift> r3
6446 Cleanup: tmp5 <- r0
6447 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6448 rd <- tmp5
6449 */
6450
6451 for (i = 0; i < 4; i++)
6452 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6453
6454 rd_val = displaced_read_reg (regs, dsc, rd);
6455 rn_val = displaced_read_reg (regs, dsc, rn);
6456 rm_val = displaced_read_reg (regs, dsc, rm);
6457 rs_val = displaced_read_reg (regs, dsc, rs);
6458 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6459 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6460 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6461 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6462 dsc->rd = rd;
6463 dsc->cleanup = &cleanup_alu_shifted_reg;
6464 }
6465
6466 static int
6467 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6468 struct regcache *regs,
6469 arm_displaced_step_copy_insn_closure *dsc)
6470 {
6471 unsigned int op = bits (insn, 21, 24);
6472 int is_mov = (op == 0xd);
6473 unsigned int rd, rn, rm, rs;
6474
6475 if (!insn_references_pc (insn, 0x000fff0ful))
6476 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6477
6478 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6479 is_mov ? "move" : "ALU",
6480 (unsigned long) insn);
6481
6482 rn = bits (insn, 16, 19);
6483 rm = bits (insn, 0, 3);
6484 rs = bits (insn, 8, 11);
6485 rd = bits (insn, 12, 15);
6486
6487 if (is_mov)
6488 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6489 else
6490 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6491
6492 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6493
6494 return 0;
6495 }
6496
6497 /* Clean up load instructions. */
6498
6499 static void
6500 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6501 arm_displaced_step_copy_insn_closure *dsc)
6502 {
6503 ULONGEST rt_val, rt_val2 = 0, rn_val;
6504
6505 rt_val = displaced_read_reg (regs, dsc, 0);
6506 if (dsc->u.ldst.xfersize == 8)
6507 rt_val2 = displaced_read_reg (regs, dsc, 1);
6508 rn_val = displaced_read_reg (regs, dsc, 2);
6509
6510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6511 if (dsc->u.ldst.xfersize > 4)
6512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6514 if (!dsc->u.ldst.immed)
6515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6516
6517 /* Handle register writeback. */
6518 if (dsc->u.ldst.writeback)
6519 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6520 /* Put result in right place. */
6521 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6522 if (dsc->u.ldst.xfersize == 8)
6523 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6524 }
6525
6526 /* Clean up store instructions. */
6527
6528 static void
6529 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6530 arm_displaced_step_copy_insn_closure *dsc)
6531 {
6532 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6533
6534 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6535 if (dsc->u.ldst.xfersize > 4)
6536 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6537 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6538 if (!dsc->u.ldst.immed)
6539 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6540 if (!dsc->u.ldst.restore_r4)
6541 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6542
6543 /* Writeback. */
6544 if (dsc->u.ldst.writeback)
6545 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6546 }
6547
6548 /* Copy "extra" load/store instructions. These are halfword/doubleword
6549 transfers, which have a different encoding to byte/word transfers. */
6550
6551 static int
6552 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6553 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6554 {
6555 unsigned int op1 = bits (insn, 20, 24);
6556 unsigned int op2 = bits (insn, 5, 6);
6557 unsigned int rt = bits (insn, 12, 15);
6558 unsigned int rn = bits (insn, 16, 19);
6559 unsigned int rm = bits (insn, 0, 3);
6560 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6561 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6562 int immed = (op1 & 0x4) != 0;
6563 int opcode;
6564 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6565
6566 if (!insn_references_pc (insn, 0x000ff00ful))
6567 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6568
6569 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6570 unprivileged ? "unprivileged " : "",
6571 (unsigned long) insn);
6572
6573 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6574
6575 if (opcode < 0)
6576 internal_error (_("copy_extra_ld_st: instruction decode error"));
6577
6578 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6579 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6580 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6581 if (!immed)
6582 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6583
6584 rt_val = displaced_read_reg (regs, dsc, rt);
6585 if (bytesize[opcode] == 8)
6586 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6587 rn_val = displaced_read_reg (regs, dsc, rn);
6588 if (!immed)
6589 rm_val = displaced_read_reg (regs, dsc, rm);
6590
6591 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6592 if (bytesize[opcode] == 8)
6593 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6594 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6595 if (!immed)
6596 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6597
6598 dsc->rd = rt;
6599 dsc->u.ldst.xfersize = bytesize[opcode];
6600 dsc->u.ldst.rn = rn;
6601 dsc->u.ldst.immed = immed;
6602 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6603 dsc->u.ldst.restore_r4 = 0;
6604
6605 if (immed)
6606 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6607 ->
6608 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6609 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6610 else
6611 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6612 ->
6613 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6614 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6615
6616 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6617
6618 return 0;
6619 }
6620
6621 /* Copy byte/halfword/word loads and stores. */
6622
6623 static void
6624 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6625 arm_displaced_step_copy_insn_closure *dsc, int load,
6626 int immed, int writeback, int size, int usermode,
6627 int rt, int rm, int rn)
6628 {
6629 ULONGEST rt_val, rn_val, rm_val = 0;
6630
6631 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6632 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6633 if (!immed)
6634 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6635 if (!load)
6636 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6637
6638 rt_val = displaced_read_reg (regs, dsc, rt);
6639 rn_val = displaced_read_reg (regs, dsc, rn);
6640 if (!immed)
6641 rm_val = displaced_read_reg (regs, dsc, rm);
6642
6643 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6644 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6645 if (!immed)
6646 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6647 dsc->rd = rt;
6648 dsc->u.ldst.xfersize = size;
6649 dsc->u.ldst.rn = rn;
6650 dsc->u.ldst.immed = immed;
6651 dsc->u.ldst.writeback = writeback;
6652
6653 /* To write PC we can do:
6654
6655 Before this sequence of instructions:
6656 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
6657 r2 is the Rn value obtained from displaced_read_reg.
6658
6659 Insn1: push {pc} Write address of STR instruction + offset on stack
6660 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6661 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6662 = addr(Insn1) + offset - addr(Insn3) - 8
6663 = offset - 16
6664 Insn4: add r4, r4, #8 r4 = offset - 8
6665 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6666 = from + offset
6667 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6668
6669 Otherwise we don't know what value to write for PC, since the offset is
6670 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6671 of this can be found in Section "Saving from r15" in
6672 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6673
6674 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6675 }
6676
6677
6678 static int
6679 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6680 uint16_t insn2, struct regcache *regs,
6681 arm_displaced_step_copy_insn_closure *dsc, int size)
6682 {
6683 unsigned int u_bit = bit (insn1, 7);
6684 unsigned int rt = bits (insn2, 12, 15);
6685 int imm12 = bits (insn2, 0, 11);
6686 ULONGEST pc_val;
6687
6688 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6689 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6690 imm12);
6691
6692 if (!u_bit)
6693 imm12 = -1 * imm12;
6694
6695 /* Rewrite instruction LDR Rt imm12 into:
6696
6697 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6698
6699 LDR R0, R2, R3,
6700
6701 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6702
6703
6704 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6705 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6706 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6707
6708 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6709
6710 pc_val = pc_val & 0xfffffffc;
6711
6712 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6713 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6714
6715 dsc->rd = rt;
6716
6717 dsc->u.ldst.xfersize = size;
6718 dsc->u.ldst.immed = 0;
6719 dsc->u.ldst.writeback = 0;
6720 dsc->u.ldst.restore_r4 = 0;
6721
6722 /* LDR R0, R2, R3 */
6723 dsc->modinsn[0] = 0xf852;
6724 dsc->modinsn[1] = 0x3;
6725 dsc->numinsns = 2;
6726
6727 dsc->cleanup = &cleanup_load;
6728
6729 return 0;
6730 }
6731
6732 static int
6733 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6734 uint16_t insn2, struct regcache *regs,
6735 arm_displaced_step_copy_insn_closure *dsc,
6736 int writeback, int immed)
6737 {
6738 unsigned int rt = bits (insn2, 12, 15);
6739 unsigned int rn = bits (insn1, 0, 3);
6740 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6741 /* In LDR (register), there is also a register Rm, which is not allowed to
6742 be PC, so we don't have to check it. */
6743
6744 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6745 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6746 dsc);
6747
6748 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6749 rt, rn, insn1, insn2);
6750
6751 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6752 0, rt, rm, rn);
6753
6754 dsc->u.ldst.restore_r4 = 0;
6755
6756 if (immed)
6757 /* ldr[b]<cond> rt, [rn, #imm], etc.
6758 ->
6759 ldr[b]<cond> r0, [r2, #imm]. */
6760 {
6761 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6762 dsc->modinsn[1] = insn2 & 0x0fff;
6763 }
6764 else
6765 /* ldr[b]<cond> rt, [rn, rm], etc.
6766 ->
6767 ldr[b]<cond> r0, [r2, r3]. */
6768 {
6769 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6770 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6771 }
6772
6773 dsc->numinsns = 2;
6774
6775 return 0;
6776 }
6777
6778
6779 static int
6780 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6781 struct regcache *regs,
6782 arm_displaced_step_copy_insn_closure *dsc,
6783 int load, int size, int usermode)
6784 {
6785 int immed = !bit (insn, 25);
6786 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6787 unsigned int rt = bits (insn, 12, 15);
6788 unsigned int rn = bits (insn, 16, 19);
6789 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6790
6791 if (!insn_references_pc (insn, 0x000ff00ful))
6792 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6793
6794 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6795 load ? (size == 1 ? "ldrb" : "ldr")
6796 : (size == 1 ? "strb" : "str"),
6797 usermode ? "t" : "",
6798 rt, rn,
6799 (unsigned long) insn);
6800
6801 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6802 usermode, rt, rm, rn);
6803
6804 if (load || rt != ARM_PC_REGNUM)
6805 {
6806 dsc->u.ldst.restore_r4 = 0;
6807
6808 if (immed)
6809 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6810 ->
6811 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6812 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6813 else
6814 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6815 ->
6816 {ldr,str}[b]<cond> r0, [r2, r3]. */
6817 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6818 }
6819 else
6820 {
6821 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6822 dsc->u.ldst.restore_r4 = 1;
6823 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6824 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6825 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6826 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6827 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6828
6829 /* As above. */
6830 if (immed)
6831 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6832 else
6833 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6834
6835 dsc->numinsns = 6;
6836 }
6837
6838 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6839
6840 return 0;
6841 }
6842
6843 /* Cleanup LDM instructions with fully-populated register list. This is an
6844 unfortunate corner case: it's impossible to implement correctly by modifying
6845 the instruction. The issue is as follows: we have an instruction,
6846
6847 ldm rN, {r0-r15}
6848
6849 which we must rewrite to avoid loading PC. A possible solution would be to
6850 do the load in two halves, something like (with suitable cleanup
6851 afterwards):
6852
6853 mov r8, rN
6854 ldm[id][ab] r8!, {r0-r7}
6855 str r7, <temp>
6856 ldm[id][ab] r8, {r7-r14}
6857 <bkpt>
6858
6859 but at present there's no suitable place for <temp>, since the scratch space
6860 is overwritten before the cleanup routine is called. For now, we simply
6861 emulate the instruction. */
6862
6863 static void
6864 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6865 arm_displaced_step_copy_insn_closure *dsc)
6866 {
6867 int inc = dsc->u.block.increment;
6868 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6869 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6870 uint32_t regmask = dsc->u.block.regmask;
6871 int regno = inc ? 0 : 15;
6872 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6873 int exception_return = dsc->u.block.load && dsc->u.block.user
6874 && (regmask & 0x8000) != 0;
6875 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6876 int do_transfer = condition_true (dsc->u.block.cond, status);
6877 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6878
6879 if (!do_transfer)
6880 return;
6881
6882 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6883 sensible we can do here. Complain loudly. */
6884 if (exception_return)
6885 error (_("Cannot single-step exception return"));
6886
6887 /* We don't handle any stores here for now. */
6888 gdb_assert (dsc->u.block.load != 0);
6889
6890 displaced_debug_printf ("emulating block transfer: %s %s %s",
6891 dsc->u.block.load ? "ldm" : "stm",
6892 dsc->u.block.increment ? "inc" : "dec",
6893 dsc->u.block.before ? "before" : "after");
6894
6895 while (regmask)
6896 {
6897 uint32_t memword;
6898
6899 if (inc)
6900 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6901 regno++;
6902 else
6903 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6904 regno--;
6905
6906 xfer_addr += bump_before;
6907
6908 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6909 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6910
6911 xfer_addr += bump_after;
6912
6913 regmask &= ~(1 << regno);
6914 }
6915
6916 if (dsc->u.block.writeback)
6917 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6918 CANNOT_WRITE_PC);
6919 }
6920
6921 /* Clean up an STM which included the PC in the register list. */
6922
6923 static void
6924 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6925 arm_displaced_step_copy_insn_closure *dsc)
6926 {
6927 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6928 int store_executed = condition_true (dsc->u.block.cond, status);
6929 CORE_ADDR pc_stored_at, transferred_regs
6930 = count_one_bits (dsc->u.block.regmask);
6931 CORE_ADDR stm_insn_addr;
6932 uint32_t pc_val;
6933 long offset;
6934 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6935
6936 /* If condition code fails, there's nothing else to do. */
6937 if (!store_executed)
6938 return;
6939
6940 if (dsc->u.block.increment)
6941 {
6942 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6943
6944 if (dsc->u.block.before)
6945 pc_stored_at += 4;
6946 }
6947 else
6948 {
6949 pc_stored_at = dsc->u.block.xfer_addr;
6950
6951 if (dsc->u.block.before)
6952 pc_stored_at -= 4;
6953 }
6954
6955 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6956 stm_insn_addr = dsc->scratch_base;
6957 offset = pc_val - stm_insn_addr;
6958
6959 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6960 offset);
6961
6962 /* Rewrite the stored PC to the proper value for the non-displaced original
6963 instruction. */
6964 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6965 dsc->insn_addr + offset);
6966 }
6967
6968 /* Clean up an LDM which includes the PC in the register list. We clumped all
6969 the registers in the transferred list into a contiguous range r0...rX (to
6970 avoid loading PC directly and losing control of the debugged program), so we
6971 must undo that here. */
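
/* For example: "ldm r0, {r1, r2, pc}" is run out of line as
   "ldm r0, {r0, r1, r2}".  This cleanup then moves r2 into the PC, r1 into
   r2 and r0 into r1, and finally restores the original value of r0 from the
   temporaries saved by arm_copy_block_xfer.  */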
6972
6973 static void
6974 cleanup_block_load_pc (struct gdbarch *gdbarch,
6975 struct regcache *regs,
6976 arm_displaced_step_copy_insn_closure *dsc)
6977 {
6978 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6979 int load_executed = condition_true (dsc->u.block.cond, status);
6980 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6981 unsigned int regs_loaded = count_one_bits (mask);
6982 unsigned int num_to_shuffle = regs_loaded, clobbered;
6983
6984 /* The method employed here will fail if the register list is fully populated
6985 (we need to avoid loading PC directly). */
6986 gdb_assert (num_to_shuffle < 16);
6987
6988 if (!load_executed)
6989 return;
6990
6991 clobbered = (1 << num_to_shuffle) - 1;
6992
6993 while (num_to_shuffle > 0)
6994 {
6995 if ((mask & (1 << write_reg)) != 0)
6996 {
6997 unsigned int read_reg = num_to_shuffle - 1;
6998
6999 if (read_reg != write_reg)
7000 {
7001 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7002 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7003 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
7004 read_reg, write_reg);
7005 }
7006 else
7007 displaced_debug_printf ("LDM: register r%d already in the right "
7008 "place", write_reg);
7009
7010 clobbered &= ~(1 << write_reg);
7011
7012 num_to_shuffle--;
7013 }
7014
7015 write_reg--;
7016 }
7017
7018 /* Restore any registers we scribbled over. */
7019 for (write_reg = 0; clobbered != 0; write_reg++)
7020 {
7021 if ((clobbered & (1 << write_reg)) != 0)
7022 {
7023 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7024 CANNOT_WRITE_PC);
7025 displaced_debug_printf ("LDM: restored clobbered register r%d",
7026 write_reg);
7027 clobbered &= ~(1 << write_reg);
7028 }
7029 }
7030
7031 /* Perform register writeback manually. */
7032 if (dsc->u.block.writeback)
7033 {
7034 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7035
7036 if (dsc->u.block.increment)
7037 new_rn_val += regs_loaded * 4;
7038 else
7039 new_rn_val -= regs_loaded * 4;
7040
7041 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7042 CANNOT_WRITE_PC);
7043 }
7044 }
7045
7046 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7047 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7048
7049 static int
7050 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7051 struct regcache *regs,
7052 arm_displaced_step_copy_insn_closure *dsc)
7053 {
7054 int load = bit (insn, 20);
7055 int user = bit (insn, 22);
7056 int increment = bit (insn, 23);
7057 int before = bit (insn, 24);
7058 int writeback = bit (insn, 21);
7059 int rn = bits (insn, 16, 19);
7060
7061 /* Block transfers which don't mention PC can be run directly
7062 out-of-line. */
7063 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7064 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7065
7066 if (rn == ARM_PC_REGNUM)
7067 {
7068 warning (_("displaced: Unpredictable LDM or STM with "
7069 "base register r15"));
7070 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7071 }
7072
7073 displaced_debug_printf ("copying block transfer insn %.8lx",
7074 (unsigned long) insn);
7075
7076 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7077 dsc->u.block.rn = rn;
7078
7079 dsc->u.block.load = load;
7080 dsc->u.block.user = user;
7081 dsc->u.block.increment = increment;
7082 dsc->u.block.before = before;
7083 dsc->u.block.writeback = writeback;
7084 dsc->u.block.cond = bits (insn, 28, 31);
7085
7086 dsc->u.block.regmask = insn & 0xffff;
7087
7088 if (load)
7089 {
7090 if ((insn & 0xffff) == 0xffff)
7091 {
7092 /* LDM with a fully-populated register list. This case is
7093 particularly tricky. Implement for now by fully emulating the
7094 instruction (which might not behave perfectly in all cases, but
7095 these instructions should be rare enough for that not to matter
7096 too much). */
7097 dsc->modinsn[0] = ARM_NOP;
7098
7099 dsc->cleanup = &cleanup_block_load_all;
7100 }
7101 else
7102 {
7103 /* LDM of a list of registers which includes PC. Implement by
7104 rewriting the list of registers to be transferred into a
7105 contiguous chunk r0...rX before doing the transfer, then shuffling
7106 registers into the correct places in the cleanup routine. */
7107 unsigned int regmask = insn & 0xffff;
7108 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7109 unsigned int i;
7110
7111 for (i = 0; i < num_in_list; i++)
7112 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7113
7114 /* Writeback makes things complicated. We need to avoid clobbering
7115 the base register with one of the registers in our modified
7116 register list, but just using a different register can't work in
7117 all cases, e.g.:
7118
7119 ldm r14!, {r0-r13,pc}
7120
7121 which would need to be rewritten as:
7122
7123 ldm rN!, {r0-r14}
7124
7125 but that can't work, because there's no free register for N.
7126
7127 Solve this by turning off the writeback bit, and emulating
7128 writeback manually in the cleanup routine. */
7129
7130 if (writeback)
7131 insn &= ~(1 << 21);
7132
7133 new_regmask = (1 << num_in_list) - 1;
7134
7135 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7136 "%.4x, modified list %.4x",
7137 rn, writeback ? "!" : "",
7138 (int) insn & 0xffff, new_regmask);
7139
7140 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7141
7142 dsc->cleanup = &cleanup_block_load_pc;
7143 }
7144 }
7145 else
7146 {
7147 /* STM of a list of registers which includes PC. Run the instruction
7148 as-is, but out of line: this will store the wrong value for the PC,
7149 so we must manually fix up the memory in the cleanup routine.
7150 Doing things this way has the advantage that we can auto-detect
7151 the offset of the PC write (which is architecture-dependent) in
7152 the cleanup routine. */
7153 dsc->modinsn[0] = insn;
7154
7155 dsc->cleanup = &cleanup_block_store_pc;
7156 }
7157
7158 return 0;
7159 }
7160
7161 static int
7162 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7163 struct regcache *regs,
7164 arm_displaced_step_copy_insn_closure *dsc)
7165 {
7166 int rn = bits (insn1, 0, 3);
7167 int load = bit (insn1, 4);
7168 int writeback = bit (insn1, 5);
7169
7170 /* Block transfers which don't mention PC can be run directly
7171 out-of-line. */
7172 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7173 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7174
7175 if (rn == ARM_PC_REGNUM)
7176 {
7177 warning (_("displaced: Unpredictable LDM or STM with "
7178 "base register r15"));
7179 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7180 "unpredictable ldm/stm", dsc);
7181 }
7182
7183 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
7184 insn1, insn2);
7185
7186 /* Clear bit 13, since it should be always zero. */
7187 dsc->u.block.regmask = (insn2 & 0xdfff);
7188 dsc->u.block.rn = rn;
7189
7190 dsc->u.block.load = load;
7191 dsc->u.block.user = 0;
7192 dsc->u.block.increment = bit (insn1, 7);
7193 dsc->u.block.before = bit (insn1, 8);
7194 dsc->u.block.writeback = writeback;
7195 dsc->u.block.cond = INST_AL;
7196 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7197
7198 if (load)
7199 {
7200 if (dsc->u.block.regmask == 0xffff)
7201 {
7202 /* This cannot happen: bit 13 was cleared from the register mask above, so it can never equal 0xffff. */
7203 gdb_assert (0);
7204 }
7205 else
7206 {
7207 unsigned int regmask = dsc->u.block.regmask;
7208 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7209 unsigned int i;
7210
7211 for (i = 0; i < num_in_list; i++)
7212 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7213
7214 if (writeback)
7215 insn1 &= ~(1 << 5);
7216
7217 new_regmask = (1 << num_in_list) - 1;
7218
7219 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7220 "%.4x, modified list %.4x",
7221 rn, writeback ? "!" : "",
7222 (int) dsc->u.block.regmask, new_regmask);
7223
7224 dsc->modinsn[0] = insn1;
7225 dsc->modinsn[1] = (new_regmask & 0xffff);
7226 dsc->numinsns = 2;
7227
7228 dsc->cleanup = &cleanup_block_load_pc;
7229 }
7230 }
7231 else
7232 {
7233 dsc->modinsn[0] = insn1;
7234 dsc->modinsn[1] = insn2;
7235 dsc->numinsns = 2;
7236 dsc->cleanup = &cleanup_block_store_pc;
7237 }
7238 return 0;
7239 }
7240
7241 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
7242 This is used to avoid a dependency on BFD's bfd_endian enum. */
7243
7244 ULONGEST
7245 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
7246 int byte_order)
7247 {
7248 return read_memory_unsigned_integer (memaddr, len,
7249 (enum bfd_endian) byte_order);
7250 }
7251
7252 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
7253
7254 CORE_ADDR
7255 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
7256 CORE_ADDR val)
7257 {
7258 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
7259 }
7260
7261 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
7262
7263 static CORE_ADDR
7264 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
7265 {
7266 return 0;
7267 }
7268
7269 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
7270
7271 int
7272 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
7273 {
7274 return arm_is_thumb (self->regcache);
7275 }
7276
7277 /* arm_software_single_step is called just before we want to resume the
7278 inferior, if we want to single-step it but there is no hardware or
7279 kernel single-step support. We find the targets of the coming
7280 instructions and return them so that breakpoints can be placed there. */
7281
7282 std::vector<CORE_ADDR>
7283 arm_software_single_step (struct regcache *regcache)
7284 {
7285 struct gdbarch *gdbarch = regcache->arch ();
7286 struct arm_get_next_pcs next_pcs_ctx;
7287
7288 arm_get_next_pcs_ctor (&next_pcs_ctx,
7289 &arm_get_next_pcs_ops,
7290 gdbarch_byte_order (gdbarch),
7291 gdbarch_byte_order_for_code (gdbarch),
7292 0,
7293 regcache);
7294
7295 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7296
7297 for (CORE_ADDR &pc_ref : next_pcs)
7298 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
7299
7300 return next_pcs;
7301 }
7302
7303 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7304 for Linux, where some SVC instructions must be treated specially. */
7305
7306 static void
7307 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7308 arm_displaced_step_copy_insn_closure *dsc)
7309 {
7310 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7311
7312 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
7313 (unsigned long) resume_addr);
7314
7315 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7316 }
7317
7318
7319 /* Common copy routine for svc instruction. */
7320
7321 static int
7322 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7323 arm_displaced_step_copy_insn_closure *dsc)
7324 {
7325 /* Preparation: none.
7326 Insn: unmodified svc.
7327 Cleanup: pc <- insn_addr + insn_size. */
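/* For example, for an ARM "svc 0" at address A, the copy in the scratch
   area performs the system call as usual; afterwards cleanup_svc simply
   points the PC at A + 4 (A + 2 for a 16-bit Thumb SVC), the instruction
   following the original SVC.  */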
7328
7329 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7330 instruction. */
7331 dsc->wrote_to_pc = 1;
7332
7333 /* Allow OS-specific code to override SVC handling. */
7334 if (dsc->u.svc.copy_svc_os)
7335 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7336 else
7337 {
7338 dsc->cleanup = &cleanup_svc;
7339 return 0;
7340 }
7341 }
7342
7343 static int
7344 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7345 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7346 {
7347
7348 displaced_debug_printf ("copying svc insn %.8lx",
7349 (unsigned long) insn);
7350
7351 dsc->modinsn[0] = insn;
7352
7353 return install_svc (gdbarch, regs, dsc);
7354 }
7355
7356 static int
7357 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7358 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7359 {
7360
7361 displaced_debug_printf ("copying svc insn %.4x", insn);
7362
7363 dsc->modinsn[0] = insn;
7364
7365 return install_svc (gdbarch, regs, dsc);
7366 }
7367
7368 /* Copy undefined instructions. */
7369
7370 static int
7371 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7372 arm_displaced_step_copy_insn_closure *dsc)
7373 {
7374 displaced_debug_printf ("copying undefined insn %.8lx",
7375 (unsigned long) insn);
7376
7377 dsc->modinsn[0] = insn;
7378
7379 return 0;
7380 }
7381
7382 static int
7383 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7384 arm_displaced_step_copy_insn_closure *dsc)
7385 {
7386
7387 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7388 (unsigned short) insn1, (unsigned short) insn2);
7389
7390 dsc->modinsn[0] = insn1;
7391 dsc->modinsn[1] = insn2;
7392 dsc->numinsns = 2;
7393
7394 return 0;
7395 }
7396
7397 /* Copy unpredictable instructions. */
7398
7399 static int
7400 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7401 arm_displaced_step_copy_insn_closure *dsc)
7402 {
7403 displaced_debug_printf ("copying unpredictable insn %.8lx",
7404 (unsigned long) insn);
7405
7406 dsc->modinsn[0] = insn;
7407
7408 return 0;
7409 }
7410
7411 /* The decode_* functions are instruction decoding helpers. They mostly follow
7412 the presentation in the ARM ARM. */
7413
7414 static int
7415 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7416 struct regcache *regs,
7417 arm_displaced_step_copy_insn_closure *dsc)
7418 {
7419 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7420 unsigned int rn = bits (insn, 16, 19);
7421
7422 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7423 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7424 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7425 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7426 else if ((op1 & 0x60) == 0x20)
7427 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7428 else if ((op1 & 0x71) == 0x40)
7429 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7430 dsc);
7431 else if ((op1 & 0x77) == 0x41)
7432 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7433 else if ((op1 & 0x77) == 0x45)
7434 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7435 else if ((op1 & 0x77) == 0x51)
7436 {
7437 if (rn != 0xf)
7438 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7439 else
7440 return arm_copy_unpred (gdbarch, insn, dsc);
7441 }
7442 else if ((op1 & 0x77) == 0x55)
7443 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7444 else if (op1 == 0x57)
7445 switch (op2)
7446 {
7447 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7448 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7449 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7450 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7451 default: return arm_copy_unpred (gdbarch, insn, dsc);
7452 }
7453 else if ((op1 & 0x63) == 0x43)
7454 return arm_copy_unpred (gdbarch, insn, dsc);
7455 else if ((op2 & 0x1) == 0x0)
7456 switch (op1 & ~0x80)
7457 {
7458 case 0x61:
7459 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7460 case 0x65:
7461 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7462 case 0x71: case 0x75:
7463 /* pld/pldw reg. */
7464 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7465 case 0x63: case 0x67: case 0x73: case 0x77:
7466 return arm_copy_unpred (gdbarch, insn, dsc);
7467 default:
7468 return arm_copy_undef (gdbarch, insn, dsc);
7469 }
7470 else
7471 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7472 }
7473
7474 static int
7475 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7476 struct regcache *regs,
7477 arm_displaced_step_copy_insn_closure *dsc)
7478 {
7479 if (bit (insn, 27) == 0)
7480 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7481 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7482 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7483 {
7484 case 0x0: case 0x2:
7485 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7486
7487 case 0x1: case 0x3:
7488 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7489
7490 case 0x4: case 0x5: case 0x6: case 0x7:
7491 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7492
7493 case 0x8:
7494 switch ((insn & 0xe00000) >> 21)
7495 {
7496 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7497 /* stc/stc2. */
7498 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7499
7500 case 0x2:
7501 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7502
7503 default:
7504 return arm_copy_undef (gdbarch, insn, dsc);
7505 }
7506
7507 case 0x9:
7508 {
7509 int rn_f = (bits (insn, 16, 19) == 0xf);
7510 switch ((insn & 0xe00000) >> 21)
7511 {
7512 case 0x1: case 0x3:
7513 /* ldc/ldc2 imm (undefined for rn == pc). */
7514 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7515 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7516
7517 case 0x2:
7518 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7519
7520 case 0x4: case 0x5: case 0x6: case 0x7:
7521 /* ldc/ldc2 lit (undefined for rn != pc). */
7522 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7523 : arm_copy_undef (gdbarch, insn, dsc);
7524
7525 default:
7526 return arm_copy_undef (gdbarch, insn, dsc);
7527 }
7528 }
7529
7530 case 0xa:
7531 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7532
7533 case 0xb:
7534 if (bits (insn, 16, 19) == 0xf)
7535 /* ldc/ldc2 lit. */
7536 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7537 else
7538 return arm_copy_undef (gdbarch, insn, dsc);
7539
7540 case 0xc:
7541 if (bit (insn, 4))
7542 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7543 else
7544 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7545
7546 case 0xd:
7547 if (bit (insn, 4))
7548 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7549 else
7550 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7551
7552 default:
7553 return arm_copy_undef (gdbarch, insn, dsc);
7554 }
7555 }
7556
7557 /* Decode miscellaneous instructions in dp/misc encoding space. */
7558
7559 static int
7560 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7561 struct regcache *regs,
7562 arm_displaced_step_copy_insn_closure *dsc)
7563 {
7564 unsigned int op2 = bits (insn, 4, 6);
7565 unsigned int op = bits (insn, 21, 22);
7566
7567 switch (op2)
7568 {
7569 case 0x0:
7570 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7571
7572 case 0x1:
7573 if (op == 0x1) /* bx. */
7574 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7575 else if (op == 0x3)
7576 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7577 else
7578 return arm_copy_undef (gdbarch, insn, dsc);
7579
7580 case 0x2:
7581 if (op == 0x1)
7582 /* Not really supported. */
7583 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7584 else
7585 return arm_copy_undef (gdbarch, insn, dsc);
7586
7587 case 0x3:
7588 if (op == 0x1)
7589 return arm_copy_bx_blx_reg (gdbarch, insn,
7590 regs, dsc); /* blx register. */
7591 else
7592 return arm_copy_undef (gdbarch, insn, dsc);
7593
7594 case 0x5:
7595 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7596
7597 case 0x7:
7598 if (op == 0x1)
7599 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7600 else if (op == 0x3)
7601 /* Not really supported. */
7602 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7603 /* Fall through. */
7604
7605 default:
7606 return arm_copy_undef (gdbarch, insn, dsc);
7607 }
7608 }
7609
7610 static int
7611 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7612 struct regcache *regs,
7613 arm_displaced_step_copy_insn_closure *dsc)
7614 {
7615 if (bit (insn, 25))
7616 switch (bits (insn, 20, 24))
7617 {
7618 case 0x10:
7619 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7620
7621 case 0x14:
7622 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7623
7624 case 0x12: case 0x16:
7625 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7626
7627 default:
7628 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7629 }
7630 else
7631 {
7632 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7633
7634 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7635 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7636 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7637 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7638 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7639 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7640 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7641 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7642 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7643 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7644 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7645 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7646 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7647 /* 2nd arg means "unprivileged". */
7648 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7649 dsc);
7650 }
7651
7652 /* Should be unreachable. */
7653 return 1;
7654 }
7655
7656 static int
7657 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7658 struct regcache *regs,
7659 arm_displaced_step_copy_insn_closure *dsc)
7660 {
7661 int a = bit (insn, 25), b = bit (insn, 4);
7662 uint32_t op1 = bits (insn, 20, 24);
7663
7664 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7665 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7666 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7667 else if ((!a && (op1 & 0x17) == 0x02)
7668 || (a && (op1 & 0x17) == 0x02 && !b))
7669 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7670 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7671 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7672 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7673 else if ((!a && (op1 & 0x17) == 0x03)
7674 || (a && (op1 & 0x17) == 0x03 && !b))
7675 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7676 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7677 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7678 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7679 else if ((!a && (op1 & 0x17) == 0x06)
7680 || (a && (op1 & 0x17) == 0x06 && !b))
7681 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7682 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7683 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7684 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7685 else if ((!a && (op1 & 0x17) == 0x07)
7686 || (a && (op1 & 0x17) == 0x07 && !b))
7687 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7688
7689 /* Should be unreachable. */
7690 return 1;
7691 }
7692
7693 static int
7694 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7695 arm_displaced_step_copy_insn_closure *dsc)
7696 {
7697 switch (bits (insn, 20, 24))
7698 {
7699 case 0x00: case 0x01: case 0x02: case 0x03:
7700 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7701
7702 case 0x04: case 0x05: case 0x06: case 0x07:
7703 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7704
7705 case 0x08: case 0x09: case 0x0a: case 0x0b:
7706 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7707 return arm_copy_unmodified (gdbarch, insn,
7708 "decode/pack/unpack/saturate/reverse", dsc);
7709
7710 case 0x18:
7711 if (bits (insn, 5, 7) == 0) /* op2. */
7712 {
7713 if (bits (insn, 12, 15) == 0xf)
7714 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7715 else
7716 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7717 }
7718 else
7719 return arm_copy_undef (gdbarch, insn, dsc);
7720
7721 case 0x1a: case 0x1b:
7722 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7723 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7724 else
7725 return arm_copy_undef (gdbarch, insn, dsc);
7726
7727 case 0x1c: case 0x1d:
7728 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7729 {
7730 if (bits (insn, 0, 3) == 0xf)
7731 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7732 else
7733 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7734 }
7735 else
7736 return arm_copy_undef (gdbarch, insn, dsc);
7737
7738 case 0x1e: case 0x1f:
7739 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7740 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7741 else
7742 return arm_copy_undef (gdbarch, insn, dsc);
7743 }
7744
7745 /* Should be unreachable. */
7746 return 1;
7747 }
7748
7749 static int
7750 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7751 struct regcache *regs,
7752 arm_displaced_step_copy_insn_closure *dsc)
7753 {
7754 if (bit (insn, 25))
7755 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7756 else
7757 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7758 }
7759
7760 static int
7761 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7762 struct regcache *regs,
7763 arm_displaced_step_copy_insn_closure *dsc)
7764 {
7765 unsigned int opcode = bits (insn, 20, 24);
7766
7767 switch (opcode)
7768 {
7769 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7770 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7771
7772 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7773 case 0x12: case 0x16:
7774 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7775
7776 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7777 case 0x13: case 0x17:
7778 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7779
7780 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7781 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7782 /* Note: there is no writeback for these instructions. Bit 25 will
7783 always be zero (the caller guarantees it), so the following works OK. */
7784 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7785 }
7786
7787 /* Should be unreachable. */
7788 return 1;
7789 }
7790
7791 /* Decode shifted register instructions. */
7792
7793 static int
7794 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7795 uint16_t insn2, struct regcache *regs,
7796 arm_displaced_step_copy_insn_closure *dsc)
7797 {
7798 /* PC is only allowed to be used in the MOV instruction. */
7799
7800 unsigned int op = bits (insn1, 5, 8);
7801 unsigned int rn = bits (insn1, 0, 3);
7802
7803 if (op == 0x2 && rn == 0xf) /* MOV */
7804 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7805 else
7806 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7807 "dp (shift reg)", dsc);
7808 }
7809
7810
7811 /* Decode extension register load/store. Exactly the same as
7812 arm_decode_ext_reg_ld_st. */
7813
7814 static int
7815 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7816 uint16_t insn2, struct regcache *regs,
7817 arm_displaced_step_copy_insn_closure *dsc)
7818 {
7819 unsigned int opcode = bits (insn1, 4, 8);
7820
7821 switch (opcode)
7822 {
7823 case 0x04: case 0x05:
7824 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7825 "vfp/neon vmov", dsc);
7826
7827 case 0x08: case 0x0c: /* 01x00 */
7828 case 0x0a: case 0x0e: /* 01x10 */
7829 case 0x12: case 0x16: /* 10x10 */
7830 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7831 "vfp/neon vstm/vpush", dsc);
7832
7833 case 0x09: case 0x0d: /* 01x01 */
7834 case 0x0b: case 0x0f: /* 01x11 */
7835 case 0x13: case 0x17: /* 10x11 */
7836 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7837 "vfp/neon vldm/vpop", dsc);
7838
7839 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7841 "vstr", dsc);
7842 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7843 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7844 }
7845
7846 /* Should be unreachable. */
7847 return 1;
7848 }
7849
7850 static int
7851 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7852 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7853 {
7854 unsigned int op1 = bits (insn, 20, 25);
7855 int op = bit (insn, 4);
7856 unsigned int coproc = bits (insn, 8, 11);
7857
7858 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7859 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7860 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7861 && (coproc & 0xe) != 0xa)
7862 /* stc/stc2. */
7863 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7864 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7865 && (coproc & 0xe) != 0xa)
7866 /* ldc/ldc2 imm/lit. */
7867 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7868 else if ((op1 & 0x3e) == 0x00)
7869 return arm_copy_undef (gdbarch, insn, dsc);
7870 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7871 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7872 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7873 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7874 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7875 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7876 else if ((op1 & 0x30) == 0x20 && !op)
7877 {
7878 if ((coproc & 0xe) == 0xa)
7879 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7880 else
7881 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7882 }
7883 else if ((op1 & 0x30) == 0x20 && op)
7884 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7885 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7886 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7887 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7888 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7889 else if ((op1 & 0x30) == 0x30)
7890 return arm_copy_svc (gdbarch, insn, regs, dsc);
7891 else
7892 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7893 }
7894
7895 static int
7896 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7897 uint16_t insn2, struct regcache *regs,
7898 arm_displaced_step_copy_insn_closure *dsc)
7899 {
7900 unsigned int coproc = bits (insn2, 8, 11);
7901 unsigned int bit_5_8 = bits (insn1, 5, 8);
7902 unsigned int bit_9 = bit (insn1, 9);
7903 unsigned int bit_4 = bit (insn1, 4);
7904
7905 if (bit_9 == 0)
7906 {
7907 if (bit_5_8 == 2)
7908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7909 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7910 dsc);
7911 else if (bit_5_8 == 0) /* UNDEFINED. */
7912 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7913 else
7914 {
7915 /* coproc is 101x: SIMD/VFP, extension registers load/store. */
7916 if ((coproc & 0xe) == 0xa)
7917 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7918 dsc);
7919 else /* coproc is not 101x. */
7920 {
7921 if (bit_4 == 0) /* STC/STC2. */
7922 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7923 "stc/stc2", dsc);
7924 else /* LDC/LDC2 {literal, immediate}. */
7925 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7926 regs, dsc);
7927 }
7928 }
7929 }
7930 else
7931 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7932
7933 return 0;
7934 }
7935
7936 static void
7937 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7938 arm_displaced_step_copy_insn_closure *dsc, int rd)
7939 {
7940 /* ADR Rd, #imm
7941
7942 Rewrite as:
7943
7944 Preparation: Rd <- PC
7945 Insn: ADD Rd, #imm
7946 Cleanup: Null.
7947 */
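/* For instance, for an instruction at 0x8000 in Thumb state,
   displaced_read_reg returns 0x8004 for the PC (the value the original
   instruction would have observed), so Rd is seeded with that value before
   the substitute ADD runs in the scratch area.  */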
7948
7949 /* Rd <- PC */
7950 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7951 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7952 }
7953
7954 static int
7955 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7956 arm_displaced_step_copy_insn_closure *dsc,
7957 int rd, unsigned int imm)
7958 {
7959
7960 /* Encoding T2: ADDS Rd, #imm */
7961 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7962
7963 install_pc_relative (gdbarch, regs, dsc, rd);
7964
7965 return 0;
7966 }
7967
7968 static int
7969 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7970 struct regcache *regs,
7971 arm_displaced_step_copy_insn_closure *dsc)
7972 {
7973 unsigned int rd = bits (insn, 8, 10);
7974 unsigned int imm8 = bits (insn, 0, 7);
7975
7976 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7977 rd, imm8, insn);
7978
7979 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7980 }
7981
7982 static int
7983 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7984 uint16_t insn2, struct regcache *regs,
7985 arm_displaced_step_copy_insn_closure *dsc)
7986 {
7987 unsigned int rd = bits (insn2, 8, 11);
7988 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
7989 simply extract the raw immediate encoding rather than computing the
7990 immediate value. When generating the ADD or SUB instruction we can then
7991 OR the immediate straight into the encoding. */
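/* For example (hypothetical), "adr.w r5, label" becomes: seed r5 with the
   original PC value (done by install_pc_relative below), then execute an
   "add.w r5, r5, #imm" or "sub.w r5, r5, #imm" in the scratch area, with
   the immediate fields carried over unchanged.  */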
7992 unsigned int imm_3_8 = insn2 & 0x70ff;
7993 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7994
7995 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7996 rd, imm_i, imm_3_8, insn1, insn2);
7997
7998 if (bit (insn1, 7)) /* ADR Encoding T2 (the subtracting form). */
7999 {
8000 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate Encoding T3). */
8001 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8002 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8003 }
8004 else /* ADR Encoding T3 (the adding form). */
8005 {
8006 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate Encoding T3). */
8007 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8008 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8009 }
8010 dsc->numinsns = 2;
8011
8012 install_pc_relative (gdbarch, regs, dsc, rd);
8013
8014 return 0;
8015 }
8016
8017 static int
8018 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
8019 struct regcache *regs,
8020 arm_displaced_step_copy_insn_closure *dsc)
8021 {
8022 unsigned int rt = bits (insn1, 8, 10);
8023 unsigned int pc;
8024 int imm8 = (bits (insn1, 0, 7) << 2);
8025
8026 /* LDR Rd, #imm8
8027
8028 Rewrite as:
8029
8030 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8031
8032 Insn: LDR R0, [R2, R3];
8033 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8034
8035 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
8036
8037 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8038 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8039 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8040 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8041 /* The assembler calculates the required value of the offset from the
8042 Align(PC,4) value of this instruction to the label. */
8043 pc = pc & 0xfffffffc;
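/* For example, a 16-bit "ldr r1, [pc, #8]" at 0x8002 sees a PC value of
   0x8006, which the mask above aligns down to 0x8004; the substitute
   "ldr r0, [r2, r3]" then loads from 0x8004 + 8 = 0x800c, exactly the
   address the original literal load would have used.  */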
8044
8045 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8046 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8047
8048 dsc->rd = rt;
8049 dsc->u.ldst.xfersize = 4;
8050 dsc->u.ldst.rn = 0;
8051 dsc->u.ldst.immed = 0;
8052 dsc->u.ldst.writeback = 0;
8053 dsc->u.ldst.restore_r4 = 0;
8054
8055 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8056
8057 dsc->cleanup = &cleanup_load;
8058
8059 return 0;
8060 }
8061
8062 /* Copy Thumb cbnz/cbz instruction. */
8063
8064 static int
8065 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8066 struct regcache *regs,
8067 arm_displaced_step_copy_insn_closure *dsc)
8068 {
8069 int non_zero = bit (insn1, 11);
8070 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8071 CORE_ADDR from = dsc->insn_addr;
8072 int rn = bits (insn1, 0, 2);
8073 int rn_val = displaced_read_reg (regs, dsc, rn);
8074
8075 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8076 /* CBNZ and CBZ do not affect the condition flags. If the branch is taken,
8077 set the condition to INST_AL so cleanup_branch knows it was taken;
8078 otherwise leave it false, in which case cleanup_branch does nothing. */
8079 if (dsc->u.branch.cond)
8080 {
8081 dsc->u.branch.cond = INST_AL;
8082 dsc->u.branch.dest = from + 4 + imm5;
8083 }
8084 else
8085 dsc->u.branch.dest = from + 2;
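/* For example (hypothetical), "cbz r3, label" at 0x8000 with r3 == 0: the
   condition becomes INST_AL and the destination is 0x8004 plus the decoded
   offset, so cleanup_branch redirects the PC there; with r3 != 0 the
   destination is simply 0x8002, the next instruction.  */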
8086
8087 dsc->u.branch.link = 0;
8088 dsc->u.branch.exchange = 0;
8089
8090 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8091 non_zero ? "cbnz" : "cbz",
8092 rn, rn_val, insn1, dsc->u.branch.dest);
8093
8094 dsc->modinsn[0] = THUMB_NOP;
8095
8096 dsc->cleanup = &cleanup_branch;
8097 return 0;
8098 }
8099
8100 /* Copy Table Branch Byte/Halfword */
8101 static int
8102 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8103 uint16_t insn2, struct regcache *regs,
8104 arm_displaced_step_copy_insn_closure *dsc)
8105 {
8106 ULONGEST rn_val, rm_val;
8107 int is_tbh = bit (insn2, 4);
8108 CORE_ADDR halfwords = 0;
8109 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8110
8111 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8112 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8113
8114 if (is_tbh)
8115 {
8116 gdb_byte buf[2];
8117
8118 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8119 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8120 }
8121 else
8122 {
8123 gdb_byte buf[1];
8124
8125 target_read_memory (rn_val + rm_val, buf, 1);
8126 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8127 }
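/* For instance (hypothetical values), "tbb [r5, r2]" with r5 = 0x9000 and
   r2 = 3 reads the byte at 0x9003; if that byte is 0x10, the branch
   destination computed below is the instruction address + 4 + 2 * 0x10.  */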
8128
8129 displaced_debug_printf ("%s base 0x%x offset 0x%x halfwords 0x%x",
8130 is_tbh ? "tbh" : "tbb",
8131 (unsigned int) rn_val, (unsigned int) rm_val,
8132 (unsigned int) halfwords);
8133
8134 dsc->u.branch.cond = INST_AL;
8135 dsc->u.branch.link = 0;
8136 dsc->u.branch.exchange = 0;
8137 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8138
8139 dsc->cleanup = &cleanup_branch;
8140
8141 return 0;
8142 }
8143
8144 static void
8145 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8146 arm_displaced_step_copy_insn_closure *dsc)
8147 {
8148 /* PC <- r7 */
8149 int val = displaced_read_reg (regs, dsc, 7);
8150 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8151
8152 /* r7 <- r8 */
8153 val = displaced_read_reg (regs, dsc, 8);
8154 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8155
8156 /* r8 <- tmp[0] */
8157 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8158
8159 }
8160
8161 static int
8162 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
8163 struct regcache *regs,
8164 arm_displaced_step_copy_insn_closure *dsc)
8165 {
8166 dsc->u.block.regmask = insn1 & 0x00ff;
8167
8168 /* Rewrite the instruction POP {rX, rY, ..., rZ, PC}
8169 as follows:
8170
8171 (1) register list is full, that is, r0-r7 are used.
8172 Prepare: tmp[0] <- r8
8173
8174 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8175 MOV r8, r7; Move value of r7 to r8;
8176 POP {r7}; Store PC value into r7.
8177
8178 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8179
8180 (2) register list is not full: suppose it names N registers besides PC
8181 (0 <= N <= 7).
8182 Prepare: for each i in 0 .. N, tmp[i] <- ri.
8183
8184 POP {r0, r1, ..., rN};
8185
8186 Cleanup: distribute the loaded values r0 - rN into the registers named in
8187 the original reglist, then restore any clobbered r0 - rN from tmp[].
8188 */
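/* A concrete (hypothetical) case of (2): "pop {r0, r2, pc}" becomes
   "pop {r0, r1, r2}"; cleanup_block_load_pc then copies the value loaded
   into r1 to r2, writes the value loaded into r2 to the PC, and restores
   from tmp[] any of r0-r2 that were only used as temporaries.  */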
8189 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
8190 dsc->u.block.regmask, insn1);
8191
8192 if (dsc->u.block.regmask == 0xff)
8193 {
8194 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8195
8196 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8197 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8198 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8199
8200 dsc->numinsns = 3;
8201 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8202 }
8203 else
8204 {
8205 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
8206 unsigned int i;
8207 unsigned int new_regmask;
8208
8209 for (i = 0; i < num_in_list + 1; i++)
8210 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8211
8212 new_regmask = (1 << (num_in_list + 1)) - 1;
8213
8214 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
8215 "modified list %.4x",
8216 (int) dsc->u.block.regmask, new_regmask);
8217
8218 dsc->u.block.regmask |= 0x8000;
8219 dsc->u.block.writeback = 0;
8220 dsc->u.block.cond = INST_AL;
8221
8222 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8223
8224 dsc->cleanup = &cleanup_block_load_pc;
8225 }
8226
8227 return 0;
8228 }
8229
8230 static void
8231 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8232 struct regcache *regs,
8233 arm_displaced_step_copy_insn_closure *dsc)
8234 {
8235 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8236 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8237 int err = 0;
8238
8239 /* 16-bit thumb instructions. */
8240 switch (op_bit_12_15)
8241 {
8242 /* Shift (immediate), add, subtract, move and compare. */
8243 case 0: case 1: case 2: case 3:
8244 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8245 "shift/add/sub/mov/cmp",
8246 dsc);
8247 break;
8248 case 4:
8249 switch (op_bit_10_11)
8250 {
8251 case 0: /* Data-processing */
8252 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8253 "data-processing",
8254 dsc);
8255 break;
8256 case 1: /* Special data instructions and branch and exchange. */
8257 {
8258 unsigned short op = bits (insn1, 7, 9);
8259 if (op == 6 || op == 7) /* BX or BLX */
8260 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8261 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8262 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8263 else
8264 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8265 dsc);
8266 }
8267 break;
8268 default: /* LDR (literal) */
8269 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8270 }
8271 break;
8272 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8273 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8274 break;
8275 case 10:
8276 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8277 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8278 else /* Generate SP-relative address */
8279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8280 break;
8281 case 11: /* Misc 16-bit instructions */
8282 {
8283 switch (bits (insn1, 8, 11))
8284 {
8285 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8286 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8287 break;
8288 case 12: case 13: /* POP */
8289 if (bit (insn1, 8)) /* PC is in register list. */
8290 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8291 else
8292 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8293 break;
8294 case 15: /* If-Then, and hints */
8295 if (bits (insn1, 0, 3))
8296 /* If-Then makes up to four following instructions conditional.
8297 The IT instruction itself is not conditional, so handle it as an
8298 ordinary unmodified instruction. */
8299 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8300 dsc);
8301 else
8302 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8303 break;
8304 default:
8305 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8306 }
8307 }
8308 break;
8309 case 12:
8310 if (op_bit_10_11 < 2) /* Store multiple registers */
8311 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8312 else /* Load multiple registers */
8313 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8314 break;
8315 case 13: /* Conditional branch and supervisor call */
8316 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8317 err = thumb_copy_b (gdbarch, insn1, dsc);
8318 else
8319 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8320 break;
8321 case 14: /* Unconditional branch */
8322 err = thumb_copy_b (gdbarch, insn1, dsc);
8323 break;
8324 default:
8325 err = 1;
8326 }
8327
8328 if (err)
8329 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error"));
8330 }
8331
8332 static int
8333 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8334 uint16_t insn1, uint16_t insn2,
8335 struct regcache *regs,
8336 arm_displaced_step_copy_insn_closure *dsc)
8337 {
8338 int rt = bits (insn2, 12, 15);
8339 int rn = bits (insn1, 0, 3);
8340 int op1 = bits (insn1, 7, 8);
8341
8342 switch (bits (insn1, 5, 6))
8343 {
8344 case 0: /* Load byte and memory hints */
8345 if (rt == 0xf) /* PLD/PLI */
8346 {
8347 if (rn == 0xf)
8348 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8349 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8350 else
8351 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8352 "pli/pld", dsc);
8353 }
8354 else
8355 {
8356 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8357 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8358 1);
8359 else
8360 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8361 "ldrb{reg, immediate}/ldrbt",
8362 dsc);
8363 }
8364
8365 break;
8366 case 1: /* Load halfword and memory hints. */
8367 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8368 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8369 "pld/unalloc memhint", dsc);
8370 else
8371 {
8372 if (rn == 0xf)
8373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8374 2);
8375 else
8376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8377 "ldrh/ldrht", dsc);
8378 }
8379 break;
8380 case 2: /* Load word */
8381 {
8382 int insn2_bit_8_11 = bits (insn2, 8, 11);
8383
8384 if (rn == 0xf)
8385 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8386 else if (op1 == 0x1) /* Encoding T3 */
8387 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8388 0, 1);
8389 else /* op1 == 0x0 */
8390 {
8391 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8392 /* LDR (immediate) */
8393 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8394 dsc, bit (insn2, 8), 1);
8395 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8396 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8397 "ldrt", dsc);
8398 else
8399 /* LDR (register) */
8400 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8401 dsc, 0, 0);
8402 }
8403 break;
8404 }
8405 default:
8406 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8407 break;
8408 }
8409 return 0;
8410 }
8411
8412 static void
8413 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8414 uint16_t insn2, struct regcache *regs,
8415 arm_displaced_step_copy_insn_closure *dsc)
8416 {
8417 int err = 0;
8418 unsigned short op = bit (insn2, 15);
8419 unsigned int op1 = bits (insn1, 11, 12);
8420
8421 switch (op1)
8422 {
8423 case 1:
8424 {
8425 switch (bits (insn1, 9, 10))
8426 {
8427 case 0:
8428 if (bit (insn1, 6))
8429 {
8430 /* Load/store {dual, exclusive}, table branch. */
8431 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8432 && bits (insn2, 5, 7) == 0)
8433 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8434 dsc);
8435 else
8436 /* PC is not allowed to be used in load/store {dual, exclusive}
8437 instructions. */
8438 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8439 "load/store dual/ex", dsc);
8440 }
8441 else /* load/store multiple */
8442 {
8443 switch (bits (insn1, 7, 8))
8444 {
8445 case 0: case 3: /* SRS, RFE */
8446 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8447 "srs/rfe", dsc);
8448 break;
8449 case 1: case 2: /* LDM/STM/PUSH/POP */
8450 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8451 break;
8452 }
8453 }
8454 break;
8455
8456 case 1:
8457 /* Data-processing (shift register). */
8458 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8459 dsc);
8460 break;
8461 default: /* Coprocessor instructions. */
8462 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8463 break;
8464 }
8465 break;
8466 }
8467 case 2: /* op1 = 2 */
8468 if (op) /* Branch and misc control. */
8469 {
8470 if (bit (insn2, 14) /* BLX/BL */
8471 || bit (insn2, 12) /* Unconditional branch */
8472 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8473 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8474 else
8475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8476 "misc ctrl", dsc);
8477 }
8478 else
8479 {
8480 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8481 {
8482 int dp_op = bits (insn1, 4, 8);
8483 int rn = bits (insn1, 0, 3);
8484 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8485 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8486 regs, dsc);
8487 else
8488 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8489 "dp/pb", dsc);
8490 }
8491 else /* Data processing (modified immediate) */
8492 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8493 "dp/mi", dsc);
8494 }
8495 break;
8496 case 3: /* op1 = 3 */
8497 switch (bits (insn1, 9, 10))
8498 {
8499 case 0:
8500 if (bit (insn1, 4))
8501 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8502 regs, dsc);
8503 else /* NEON Load/Store and Store single data item */
8504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8505 "neon elt/struct load/store",
8506 dsc);
8507 break;
8508 case 1: /* op1 = 3, bits (9, 10) == 1 */
8509 switch (bits (insn1, 7, 8))
8510 {
8511 case 0: case 1: /* Data processing (register) */
8512 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8513 "dp(reg)", dsc);
8514 break;
8515 case 2: /* Multiply and absolute difference */
8516 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8517 "mul/mua/diff", dsc);
8518 break;
8519 case 3: /* Long multiply and divide */
8520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8521 "lmul/lmua", dsc);
8522 break;
8523 }
8524 break;
8525 default: /* Coprocessor instructions */
8526 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8527 break;
8528 }
8529 break;
8530 default:
8531 err = 1;
8532 }
8533
8534 if (err)
8535 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error"));
8536
8537 }
8538
8539 static void
8540 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8541 struct regcache *regs,
8542 arm_displaced_step_copy_insn_closure *dsc)
8543 {
8544 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8545 uint16_t insn1
8546 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8547
8548 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8549 insn1, (unsigned long) from);
8550
8551 dsc->is_thumb = 1;
8552 dsc->insn_size = thumb_insn_size (insn1);
8553 if (thumb_insn_size (insn1) == 4)
8554 {
8555 uint16_t insn2
8556 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8557 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8558 }
8559 else
8560 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8561 }
8562
8563 void
8564 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8565 CORE_ADDR to, struct regcache *regs,
8566 arm_displaced_step_copy_insn_closure *dsc)
8567 {
8568 int err = 0;
8569 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8570 uint32_t insn;
8571
8572 /* Most displaced instructions use a 1-instruction scratch space, so set this
8573 here and override below if/when necessary. */
8574 dsc->numinsns = 1;
8575 dsc->insn_addr = from;
8576 dsc->scratch_base = to;
8577 dsc->cleanup = NULL;
8578 dsc->wrote_to_pc = 0;
8579
8580 if (!displaced_in_arm_mode (regs))
8581 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8582
8583 dsc->is_thumb = 0;
8584 dsc->insn_size = 4;
8585 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8586 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8587 (unsigned long) insn, (unsigned long) from);
8588
8589 if ((insn & 0xf0000000) == 0xf0000000)
8590 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8591 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8592 {
8593 case 0x0: case 0x1: case 0x2: case 0x3:
8594 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8595 break;
8596
8597 case 0x4: case 0x5: case 0x6:
8598 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8599 break;
8600
8601 case 0x7:
8602 err = arm_decode_media (gdbarch, insn, dsc);
8603 break;
8604
8605 case 0x8: case 0x9: case 0xa: case 0xb:
8606 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8607 break;
8608
8609 case 0xc: case 0xd: case 0xe: case 0xf:
8610 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8611 break;
8612 }
8613
8614 if (err)
8615 internal_error (_("arm_process_displaced_insn: Instruction decode error"));
8616 }
8617
8618 /* Actually set up the scratch space for a displaced instruction. */
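/* As an example of the layout this produces: for a single 4-byte ARM
   instruction the scratch area ends up holding the (possibly modified)
   instruction at TO followed by the 4-byte ARM breakpoint at TO + 4;
   Thumb copies use 2-byte units, and multi-instruction rewrites
   (dsc->numinsns > 1) extend the sequence before the breakpoint.  */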
8619
8620 void
8621 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8622 CORE_ADDR to,
8623 arm_displaced_step_copy_insn_closure *dsc)
8624 {
8625 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8626 unsigned int i, len, offset;
8627 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8628 int size = dsc->is_thumb ? 2 : 4;
8629 const gdb_byte *bkp_insn;
8630
8631 offset = 0;
8632 /* Poke modified instruction(s). */
8633 for (i = 0; i < dsc->numinsns; i++)
8634 {
8635 if (size == 4)
8636 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8637 dsc->modinsn[i], (unsigned long) to + offset);
8638 else if (size == 2)
8639 displaced_debug_printf ("writing insn %.4x at %.8lx",
8640 (unsigned short) dsc->modinsn[i],
8641 (unsigned long) to + offset);
8642
8643 write_memory_unsigned_integer (to + offset, size,
8644 byte_order_for_code,
8645 dsc->modinsn[i]);
8646 offset += size;
8647 }
8648
8649 /* Choose the correct breakpoint instruction. */
8650 if (dsc->is_thumb)
8651 {
8652 bkp_insn = tdep->thumb_breakpoint;
8653 len = tdep->thumb_breakpoint_size;
8654 }
8655 else
8656 {
8657 bkp_insn = tdep->arm_breakpoint;
8658 len = tdep->arm_breakpoint_size;
8659 }
8660
8661 /* Put breakpoint afterwards. */
8662 write_memory (to + offset, bkp_insn, len);
8663
8664 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8665 paddress (gdbarch, to));
8666 }
8667
8668 /* Entry point for cleaning things up after a displaced instruction has been
8669 single-stepped. */
8670
8671 void
8672 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8673 struct displaced_step_copy_insn_closure *dsc_,
8674 CORE_ADDR from, CORE_ADDR to,
8675 struct regcache *regs, bool completed_p)
8676 {
8677 /* The following block exists as a temporary measure while displaced
8678 stepping is fixed one architecture at a time within GDB.
8679
8680 In an earlier implementation of displaced stepping, if GDB thought the
8681 displaced instruction had not been executed then this fix up function
8682 was never called. As a consequence, things that should be fixed by
8683 this function were left in an unfixed state.
8684
8685 However, it's not as simple as always calling this function; this
8686 function needs to be updated to decide what should be fixed up based
8687 on whether the displaced step executed or not, which requires each
8688 architecture to be considered individually.
8689
8690 Until this architecture is updated, this block replicates the old
8691 behaviour; we just restore the program counter register, and leave
8692 everything else unfixed. */
8693 if (!completed_p)
8694 {
8695 CORE_ADDR pc = regcache_read_pc (regs);
8696 pc = from + (pc - to);
8697 regcache_write_pc (regs, pc);
8698 return;
8699 }
8700
8701 arm_displaced_step_copy_insn_closure *dsc
8702 = (arm_displaced_step_copy_insn_closure *) dsc_;
8703
8704 if (dsc->cleanup)
8705 dsc->cleanup (gdbarch, regs, dsc);
8706
8707 if (!dsc->wrote_to_pc)
8708 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8709 dsc->insn_addr + dsc->insn_size);
8710
8711 }
8712
8713 #include "bfd-in2.h"
8714 #include "libcoff.h"
8715
8716 static int
8717 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8718 {
8719 gdb_disassemble_info *di
8720 = static_cast<gdb_disassemble_info *> (info->application_data);
8721 struct gdbarch *gdbarch = di->arch ();
8722
8723 if (arm_pc_is_thumb (gdbarch, memaddr))
8724 {
8725 static asymbol *asym;
8726 static combined_entry_type ce;
8727 static struct coff_symbol_struct csym;
8728 static struct bfd fake_bfd;
8729 static bfd_target fake_target;
8730
8731 if (csym.native == NULL)
8732 {
8733 /* Create a fake symbol vector containing a Thumb symbol.
8734 This is solely so that the code in print_insn_little_arm()
8735 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8736 the presence of a Thumb symbol and switch to decoding
8737 Thumb instructions. */
8738
8739 fake_target.flavour = bfd_target_coff_flavour;
8740 fake_bfd.xvec = &fake_target;
8741 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8742 csym.native = &ce;
8743 csym.symbol.the_bfd = &fake_bfd;
8744 csym.symbol.name = "fake";
8745 asym = (asymbol *) & csym;
8746 }
8747
8748 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8749 info->symbols = &asym;
8750 }
8751 else
8752 info->symbols = NULL;
8753
8754 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8755 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
8756 opcodes/arm-dis.c:print_insn resets info->mach, which would trigger
8757 the assert on the mismatch between info->mach and
8758 bfd_get_mach (current_program_space->exec_bfd ()) in
8759 default_print_insn. */
8760 if (current_program_space->exec_bfd () != NULL
8761 && (current_program_space->exec_bfd ()->arch_info
8762 == gdbarch_bfd_arch_info (gdbarch)))
8763 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8764
8765 return default_print_insn (memaddr, info);
8766 }
8767
8768 /* The following define instruction sequences that will cause ARM
8769 cpu's to take an undefined instruction trap. These are used to
8770 signal a breakpoint to GDB.
8771
8772 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8773 modes. A different instruction is required for each mode. The ARM
8774 cpu's can also be big or little endian. Thus four different
8775 instructions are needed to support all cases.
8776
8777 Note: ARMv4 defines several new instructions that will take the
8778 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8779 not in fact add the new instructions. The new undefined
8780 instructions in ARMv4 are all instructions that had no defined
8781 behaviour in earlier chips. There is no guarantee that they will
8782 raise an exception; they may instead be treated as NOPs. In practice,
8783 it may only be safe to rely on instructions matching:
8784
8785 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8786 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8787 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8788
8789 Even this may only be true if the condition predicate is true. The
8790 following use a condition predicate of ALWAYS so it is always TRUE.
8791
8792 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8793 and NetBSD all use a software interrupt rather than an undefined
8794 instruction to force a trap. This can be handled by the
8795 abi-specific code during establishment of the gdbarch vector. */
8796
8797 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8798 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8799 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8800 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
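/* For example, the little-endian ARM sequence above is the word
   0xe7ffdefe: condition 1110 (AL), bits 27-25 = 011 and bit 4 = 1, so it
   falls within the pattern described above and, with the ALWAYS condition,
   reliably takes the undefined instruction trap.  */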
8801
8802 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8803 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8804 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8805 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8806
8807 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8808
8809 static int
8810 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8811 {
8812 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8813 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8814
8815 if (arm_pc_is_thumb (gdbarch, *pcptr))
8816 {
8817 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8818
8819 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8820 check whether we are replacing a 32-bit instruction. */
8821 if (tdep->thumb2_breakpoint != NULL)
8822 {
8823 gdb_byte buf[2];
8824
8825 if (target_read_memory (*pcptr, buf, 2) == 0)
8826 {
8827 unsigned short inst1;
8828
8829 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8830 if (thumb_insn_size (inst1) == 4)
8831 return ARM_BP_KIND_THUMB2;
8832 }
8833 }
8834
8835 return ARM_BP_KIND_THUMB;
8836 }
8837 else
8838 return ARM_BP_KIND_ARM;
8839
8840 }
8841
8842 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8843
8844 static const gdb_byte *
8845 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8846 {
8847 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8848
8849 switch (kind)
8850 {
8851 case ARM_BP_KIND_ARM:
8852 *size = tdep->arm_breakpoint_size;
8853 return tdep->arm_breakpoint;
8854 case ARM_BP_KIND_THUMB:
8855 *size = tdep->thumb_breakpoint_size;
8856 return tdep->thumb_breakpoint;
8857 case ARM_BP_KIND_THUMB2:
8858 *size = tdep->thumb2_breakpoint_size;
8859 return tdep->thumb2_breakpoint;
8860 default:
8861 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8862 }
8863 }
8864
8865 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8866
8867 static int
8868 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8869 struct regcache *regcache,
8870 CORE_ADDR *pcptr)
8871 {
8872 gdb_byte buf[4];
8873
8874 /* Check that the memory pointed to by PC is readable. */
8875 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8876 {
8877 struct arm_get_next_pcs next_pcs_ctx;
8878
8879 arm_get_next_pcs_ctor (&next_pcs_ctx,
8880 &arm_get_next_pcs_ops,
8881 gdbarch_byte_order (gdbarch),
8882 gdbarch_byte_order_for_code (gdbarch),
8883 0,
8884 regcache);
8885
8886 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8887
8888 /* If *PCPTR is one of the next instructions after the current PC, use
8889 the software single-step computation and determine the Thumb mode
8890 from that destination address. */
8891 for (CORE_ADDR pc : next_pcs)
8892 {
8893 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8894 {
8895 if (IS_THUMB_ADDR (pc))
8896 {
8897 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8898 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8899 }
8900 else
8901 return ARM_BP_KIND_ARM;
8902 }
8903 }
8904 }
8905
8906 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8907 }
8908
8909 /* Extract from an array REGBUF containing the (raw) register state a
8910 function return value of type TYPE, and copy that, in virtual
8911 format, into VALBUF. */
8912
8913 static void
8914 arm_extract_return_value (struct type *type, struct regcache *regs,
8915 gdb_byte *valbuf)
8916 {
8917 struct gdbarch *gdbarch = regs->arch ();
8918 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8919 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8920
8921 if (TYPE_CODE_FLT == type->code ())
8922 {
8923 switch (tdep->fp_model)
8924 {
8925 case ARM_FLOAT_FPA:
8926 {
8927 /* The value is in register F0 in internal format. We need to
8928 extract the raw value and then convert it to the desired
8929 internal type. */
8930 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8931
8932 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8933 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8934 valbuf, type);
8935 }
8936 break;
8937
8938 case ARM_FLOAT_SOFT_FPA:
8939 case ARM_FLOAT_SOFT_VFP:
8940 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8941 not using the VFP ABI code. */
8942 case ARM_FLOAT_VFP:
8943 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8944 if (type->length () > 4)
8945 regs->cooked_read (ARM_A1_REGNUM + 1,
8946 valbuf + ARM_INT_REGISTER_SIZE);
8947 break;
8948
8949 default:
8950 internal_error (_("arm_extract_return_value: "
8951 "Floating point model not supported"));
8952 break;
8953 }
8954 }
8955 else if (type->code () == TYPE_CODE_INT
8956 || type->code () == TYPE_CODE_CHAR
8957 || type->code () == TYPE_CODE_BOOL
8958 || type->code () == TYPE_CODE_PTR
8959 || TYPE_IS_REFERENCE (type)
8960 || type->code () == TYPE_CODE_ENUM
8961 || is_fixed_point_type (type))
8962 {
8963 /* If the type is a plain integer, then the access is
8964 straightforward. Otherwise we have to play around a bit
8965 more. */
8966 int len = type->length ();
8967 int regno = ARM_A1_REGNUM;
8968 ULONGEST tmp;
8969
8970 while (len > 0)
8971 {
8972 /* By using store_unsigned_integer we avoid having to do
8973 anything special for small big-endian values. */
8974 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8975 store_unsigned_integer (valbuf,
8976 (len > ARM_INT_REGISTER_SIZE
8977 ? ARM_INT_REGISTER_SIZE : len),
8978 byte_order, tmp);
8979 len -= ARM_INT_REGISTER_SIZE;
8980 valbuf += ARM_INT_REGISTER_SIZE;
8981 }
8982 }
8983 else
8984 {
8985 /* For a structure or union the behaviour is as if the value had
8986 been stored to word-aligned memory and then loaded into
8987 registers with 32-bit load instruction(s). */
8988 int len = type->length ();
8989 int regno = ARM_A1_REGNUM;
8990 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8991
8992 while (len > 0)
8993 {
8994 regs->cooked_read (regno++, tmpbuf);
8995 memcpy (valbuf, tmpbuf,
8996 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8997 len -= ARM_INT_REGISTER_SIZE;
8998 valbuf += ARM_INT_REGISTER_SIZE;
8999 }
9000 }
9001 }
9002
9003
9004 /* Will a function return an aggregate type in memory or in a
9005 register? Return 0 if an aggregate type can be returned in a
9006 register, 1 if it must be returned in memory. */
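/* For example, under the AAPCS branch below a 4-byte struct { char c[4]; }
   is returned in a register, whereas an 8-byte struct { int a, b; } must
   be returned in memory.  */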
9007
9008 static int
9009 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9010 {
9011 enum type_code code;
9012
9013 type = check_typedef (type);
9014
9015 /* Simple, non-aggregate types (ie not including vectors and
9016 complex) are always returned in a register (or registers). */
9017 code = type->code ();
9018 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
9019 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
9020 return 0;
9021
9022 if (TYPE_HAS_DYNAMIC_LENGTH (type))
9023 return 1;
9024
9025 if (TYPE_CODE_ARRAY == code && type->is_vector ())
9026 {
9027 /* Vector values should be returned using ARM registers if they
9028 are not over 16 bytes. */
9029 return (type->length () > 16);
9030 }
9031
9032 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9033 if (tdep->arm_abi != ARM_ABI_APCS)
9034 {
9035 /* The AAPCS says all aggregates not larger than a word are returned
9036 in a register. */
9037 if (type->length () <= ARM_INT_REGISTER_SIZE
9038 && language_pass_by_reference (type).trivially_copyable)
9039 return 0;
9040
9041 return 1;
9042 }
9043 else
9044 {
9045 int nRc;
9046
9047 /* All aggregate types that won't fit in a register must be returned
9048 in memory. */
9049 if (type->length () > ARM_INT_REGISTER_SIZE
9050 || !language_pass_by_reference (type).trivially_copyable)
9051 return 1;
9052
9053 /* In the ARM ABI, "integer" like aggregate types are returned in
9054 registers. For an aggregate type to be integer like, its size
9055 must be less than or equal to ARM_INT_REGISTER_SIZE and the
9056 offset of each addressable subfield must be zero. Note that bit
9057 fields are not addressable, and all addressable subfields of
9058 unions always start at offset zero.
9059
9060 This function is based on the behaviour of GCC 2.95.1.
9061 See: gcc/arm.c: arm_return_in_memory() for details.
9062
9063 Note: All versions of GCC before GCC 2.95.2 do not set up the
9064 parameters correctly for a function returning the following
9065 structure: struct { float f;}; This should be returned in memory,
9066 not a register. Richard Earnshaw sent me a patch, but I do not
9067 know of any way to detect if a function like the above has been
9068 compiled with the correct calling convention. */
9069
9070 /* Assume all other aggregate types can be returned in a register.
9071 Run a check for structures, unions and arrays. */
9072 nRc = 0;
9073
9074 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9075 {
9076 int i;
9077 /* Need to check if this struct/union is "integer" like. For
9078 this to be true, its size must be less than or equal to
9079 ARM_INT_REGISTER_SIZE and the offset of each addressable
9080 subfield must be zero. Note that bit fields are not
9081 addressable, and unions always start at offset zero. If any
9082 of the subfields is a floating point type, the struct/union
9083 cannot be an integer type. */
9084
9085 /* For each field in the object, check:
9086 1) Is it FP? --> yes, nRc = 1;
9087 2) Is it addressable (bitpos != 0) and
9088 not packed (bitsize == 0)?
9089 --> yes, nRc = 1
9090 */
9091
9092 for (i = 0; i < type->num_fields (); i++)
9093 {
9094 enum type_code field_type_code;
9095
9096 field_type_code
9097 = check_typedef (type->field (i).type ())->code ();
9098
9099 /* Is it a floating point type field? */
9100 if (field_type_code == TYPE_CODE_FLT)
9101 {
9102 nRc = 1;
9103 break;
9104 }
9105
9106 /* If bitpos != 0, then we have to care about it. */
9107 if (type->field (i).loc_bitpos () != 0)
9108 {
9109 /* Bitfields are not addressable. If the field bitsize is
9110 zero, then the field is not packed. Hence it cannot be
9111 a bitfield or any other packed type. */
9112 if (type->field (i).bitsize () == 0)
9113 {
9114 nRc = 1;
9115 break;
9116 }
9117 }
9118 }
9119 }
9120
9121 return nRc;
9122 }
9123 }
9124
9125 /* Write into appropriate registers a function return value of type
9126 TYPE, given in virtual format. */
9127
9128 static void
9129 arm_store_return_value (struct type *type, struct regcache *regs,
9130 const gdb_byte *valbuf)
9131 {
9132 struct gdbarch *gdbarch = regs->arch ();
9133 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9134
9135 if (type->code () == TYPE_CODE_FLT)
9136 {
9137 gdb_byte buf[ARM_FP_REGISTER_SIZE];
9138 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9139
9140 switch (tdep->fp_model)
9141 {
9142 case ARM_FLOAT_FPA:
9143
9144 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
9145 regs->cooked_write (ARM_F0_REGNUM, buf);
9146 break;
9147
9148 case ARM_FLOAT_SOFT_FPA:
9149 case ARM_FLOAT_SOFT_VFP:
9150 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9151 not using the VFP ABI code. */
9152 case ARM_FLOAT_VFP:
9153 regs->cooked_write (ARM_A1_REGNUM, valbuf);
9154 if (type->length () > 4)
9155 regs->cooked_write (ARM_A1_REGNUM + 1,
9156 valbuf + ARM_INT_REGISTER_SIZE);
9157 break;
9158
9159 default:
9160 internal_error (_("arm_store_return_value: Floating "
9161 "point model not supported"));
9162 break;
9163 }
9164 }
9165 else if (type->code () == TYPE_CODE_INT
9166 || type->code () == TYPE_CODE_CHAR
9167 || type->code () == TYPE_CODE_BOOL
9168 || type->code () == TYPE_CODE_PTR
9169 || TYPE_IS_REFERENCE (type)
9170 || type->code () == TYPE_CODE_ENUM)
9171 {
9172 if (type->length () <= 4)
9173 {
9174 /* Values of one word or less are zero/sign-extended and
9175 returned in r0. */
9176 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9177 LONGEST val = unpack_long (type, valbuf);
9178
9179 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
9180 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
9181 }
9182 else
9183 {
9184 /* Integral values greater than one word are stored in consecutive
9185 registers starting with r0. This will always be a multiple of
9186 the register size. */
9187 int len = type->length ();
9188 int regno = ARM_A1_REGNUM;
9189
9190 while (len > 0)
9191 {
9192 regs->cooked_write (regno++, valbuf);
9193 len -= ARM_INT_REGISTER_SIZE;
9194 valbuf += ARM_INT_REGISTER_SIZE;
9195 }
9196 }
9197 }
9198 else
9199 {
9200 /* For a structure or union the behaviour is as if the value had
9201 been stored to word-aligned memory and then loaded into
9202 registers with 32-bit load instruction(s). */
9203 int len = type->length ();
9204 int regno = ARM_A1_REGNUM;
9205 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9206
9207 while (len > 0)
9208 {
9209 memcpy (tmpbuf, valbuf,
9210 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9211 regs->cooked_write (regno++, tmpbuf);
9212 len -= ARM_INT_REGISTER_SIZE;
9213 valbuf += ARM_INT_REGISTER_SIZE;
9214 }
9215 }
9216 }
9217
9218
9219 /* Handle function return values. */
9220
9221 static enum return_value_convention
9222 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9223 struct type *valtype, struct regcache *regcache,
9224 struct value **read_value, const gdb_byte *writebuf)
9225 {
9226 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9227 struct type *func_type = function ? function->type () : NULL;
9228 enum arm_vfp_cprc_base_type vfp_base_type;
9229 int vfp_base_count;
9230
9231 if (arm_vfp_abi_for_function (gdbarch, func_type)
9232 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9233 {
9234 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9235 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9236 int i;
9237
9238 gdb_byte *readbuf = nullptr;
9239 if (read_value != nullptr)
9240 {
9241 *read_value = value::allocate (valtype);
9242 readbuf = (*read_value)->contents_raw ().data ();
9243 }
9244
9245 for (i = 0; i < vfp_base_count; i++)
9246 {
9247 if (reg_char == 'q')
9248 {
9249 if (writebuf)
9250 arm_neon_quad_write (gdbarch, regcache, i,
9251 writebuf + i * unit_length);
9252
9253 if (readbuf)
9254 arm_neon_quad_read (gdbarch, regcache, i,
9255 readbuf + i * unit_length);
9256 }
9257 else
9258 {
9259 char name_buf[4];
9260 int regnum;
9261
9262 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9263 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9264 strlen (name_buf));
9265 if (writebuf)
9266 regcache->cooked_write (regnum, writebuf + i * unit_length);
9267 if (readbuf)
9268 regcache->cooked_read (regnum, readbuf + i * unit_length);
9269 }
9270 }
9271 return RETURN_VALUE_REGISTER_CONVENTION;
9272 }
9273
9274 if (valtype->code () == TYPE_CODE_STRUCT
9275 || valtype->code () == TYPE_CODE_UNION
9276 || valtype->code () == TYPE_CODE_ARRAY)
9277 {
9278 /* From the AAPCS document:
9279
9280 Result return:
9281
9282 A Composite Type larger than 4 bytes, or whose size cannot be
9283 determined statically by both caller and callee, is stored in memory
9284 at an address passed as an extra argument when the function was
9285 called (Parameter Passing, rule A.4). The memory to be used for the
9286 result may be modified at any point during the function call.
9287
9288 Parameter Passing:
9289
9290 A.4: If the subroutine is a function that returns a result in memory,
9291 then the address for the result is placed in r0 and the NCRN is set
9292 to r1. */
9293 if (tdep->struct_return == pcc_struct_return
9294 || arm_return_in_memory (gdbarch, valtype))
9295 {
9296 if (read_value != nullptr)
9297 {
9298 CORE_ADDR addr;
9299
9300 regcache->cooked_read (ARM_A1_REGNUM, &addr);
9301 *read_value = value_at_non_lval (valtype, addr);
9302 }
9303 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
9304 }
9305 }
9306 else if (valtype->code () == TYPE_CODE_COMPLEX)
9307 {
9308 if (arm_return_in_memory (gdbarch, valtype))
9309 return RETURN_VALUE_STRUCT_CONVENTION;
9310 }
9311
9312 if (writebuf)
9313 arm_store_return_value (valtype, regcache, writebuf);
9314
9315 if (read_value != nullptr)
9316 {
9317 *read_value = value::allocate (valtype);
9318 gdb_byte *readbuf = (*read_value)->contents_raw ().data ();
9319 arm_extract_return_value (valtype, regcache, readbuf);
9320 }
9321
9322 return RETURN_VALUE_REGISTER_CONVENTION;
9323 }
9324
9325
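/* Implement the get_longjmp_target gdbarch method: recover the longjmp
   destination PC from the jmp_buf whose address is passed in r0.  The
   jmp_buf layout (tdep->jb_pc, tdep->jb_elt_size) must have been set up
   by OS-specific code.  */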
9326 static int
9327 arm_get_longjmp_target (frame_info_ptr frame, CORE_ADDR *pc)
9328 {
9329 struct gdbarch *gdbarch = get_frame_arch (frame);
9330 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9331 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9332 CORE_ADDR jb_addr;
9333 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9334
9335 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9336
9337 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9338 ARM_INT_REGISTER_SIZE))
9339 return 0;
9340
9341 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9342 return 1;
9343 }
9344 /* A call to a cmse secure entry function "foo" at "a" is modified by
9345 GNU ld into the form shown at "b".
9346 a) bl xxxx <foo>
9347
9348 <foo>
9349 xxxx:
9350
9351 b) bl yyyy <__acle_se_foo>
9352
9353 section .gnu.sgstubs:
9354 <foo>
9355 yyyy: sg // secure gateway
9356 b.w xxxx <__acle_se_foo> // original_branch_dest
9357
9358 <__acle_se_foo>
9359 xxxx:
9360
9361 When control reaches "b", the pc contains "yyyy" (the sg address), which is
9362 a trampoline and does not exist in the source code. This function returns
9363 the target pc "xxxx". For more details please refer to section 5.4
9364 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9365 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9366 document on www.developer.arm.com. */
9367
9368 static CORE_ADDR
9369 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9370 {
9371 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9372 char *target_name = (char *) alloca (target_len);
9373 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9374
9375 struct bound_minimal_symbol minsym
9376 = lookup_minimal_symbol (target_name, NULL, objfile);
9377
9378 if (minsym.minsym != nullptr)
9379 return minsym.value_address ();
9380
9381 return 0;
9382 }
9383
9384 /* Return true when SEC points to ".gnu.sgstubs" section. */
9385
9386 static bool
9387 arm_is_sgstubs_section (struct obj_section *sec)
9388 {
9389 return (sec != nullptr
9390 && sec->the_bfd_section != nullptr
9391 && sec->the_bfd_section->name != nullptr
9392 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9393 }
9394
9395 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9396 return the target PC. Otherwise return 0. */
9397
9398 CORE_ADDR
9399 arm_skip_stub (frame_info_ptr frame, CORE_ADDR pc)
9400 {
9401 const char *name;
9402 int namelen;
9403 CORE_ADDR start_addr;
9404
9405 /* Find the starting address and name of the function containing the PC. */
9406 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9407 {
9408 /* A 'bx reg' trampoline doesn't belong to any function. Do the
9409 check here. */
9410 start_addr = arm_skip_bx_reg (frame, pc);
9411 if (start_addr != 0)
9412 return start_addr;
9413
9414 return 0;
9415 }
9416
9417 /* If PC is in a Thumb call or return stub, return the address of the
9418 target PC, which is in a register. The thunk functions are called
9419 _call_via_xx, where x is the register name. The possible names
9420 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9421 functions, named __ARM_call_via_r[0-7]. */
9422 if (startswith (name, "_call_via_")
9423 || startswith (name, "__ARM_call_via_"))
9424 {
9425 /* Use the name suffix to determine which register contains the
9426 target PC. */
9427 static const char *table[15] =
9428 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9429 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9430 };
9431 int regno;
9432 int offset = strlen (name) - 2;
9433
9434 for (regno = 0; regno <= 14; regno++)
9435 if (strcmp (&name[offset], table[regno]) == 0)
9436 return get_frame_register_unsigned (frame, regno);
9437 }
9438
9439 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9440 non-interworking calls to foo. We could decode the stubs
9441 to find the target but it's easier to use the symbol table. */
9442 namelen = strlen (name);
9443 if (name[0] == '_' && name[1] == '_'
9444 && ((namelen > 2 + strlen ("_from_thumb")
9445 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9446 || (namelen > 2 + strlen ("_from_arm")
9447 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9448 {
9449 char *target_name;
9450 int target_len = namelen - 2;
9451 struct bound_minimal_symbol minsym;
9452 struct objfile *objfile;
9453 struct obj_section *sec;
9454
9455 if (name[namelen - 1] == 'b')
9456 target_len -= strlen ("_from_thumb");
9457 else
9458 target_len -= strlen ("_from_arm");
9459
9460 target_name = (char *) alloca (target_len + 1);
9461 memcpy (target_name, name + 2, target_len);
9462 target_name[target_len] = '\0';
9463
9464 sec = find_pc_section (pc);
9465 objfile = (sec == NULL) ? NULL : sec->objfile;
9466 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9467 if (minsym.minsym != NULL)
9468 return minsym.value_address ();
9469 else
9470 return 0;
9471 }
9472
9473 struct obj_section *section = find_pc_section (pc);
9474
9475 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9476 if (arm_is_sgstubs_section (section))
9477 return arm_skip_cmse_entry (pc, name, section->objfile);
9478
9479 return 0; /* not a stub */
9480 }
9481
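/* Re-select the current architecture after one of the "set arm ..."
   commands below changes a setting, so that the new value takes effect
   immediately.  */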
9482 static void
9483 arm_update_current_architecture (void)
9484 {
9485 /* If the current architecture is not ARM, we have nothing to do. */
9486 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9487 return;
9488
9489 /* Update the architecture. */
9490 gdbarch_info info;
9491 if (!gdbarch_update_p (info))
9492 internal_error (_("could not update architecture"));
9493 }
9494
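/* Handler for "set arm fp-model": translate the user's choice into an
   arm_float_model value and rebuild the architecture.  */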
9495 static void
9496 set_fp_model_sfunc (const char *args, int from_tty,
9497 struct cmd_list_element *c)
9498 {
9499 int fp_model;
9500
9501 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9502 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9503 {
9504 arm_fp_model = (enum arm_float_model) fp_model;
9505 break;
9506 }
9507
9508 if (fp_model == ARM_FLOAT_LAST)
9509 internal_error (_("Invalid fp model accepted: %s."),
9510 current_fp_model);
9511
9512 arm_update_current_architecture ();
9513 }
9514
9515 static void
9516 show_fp_model (struct ui_file *file, int from_tty,
9517 struct cmd_list_element *c, const char *value)
9518 {
9519 if (arm_fp_model == ARM_FLOAT_AUTO
9520 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9521 {
9522 arm_gdbarch_tdep *tdep
9523 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9524
9525 gdb_printf (file, _("\
9526 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9527 fp_model_strings[tdep->fp_model]);
9528 }
9529 else
9530 gdb_printf (file, _("\
9531 The current ARM floating point model is \"%s\".\n"),
9532 fp_model_strings[arm_fp_model]);
9533 }
9534
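/* Handler for "set arm abi", analogous to set_fp_model_sfunc above.  */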
9535 static void
9536 arm_set_abi (const char *args, int from_tty,
9537 struct cmd_list_element *c)
9538 {
9539 int arm_abi;
9540
9541 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9542 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9543 {
9544 arm_abi_global = (enum arm_abi_kind) arm_abi;
9545 break;
9546 }
9547
9548 if (arm_abi == ARM_ABI_LAST)
9549 internal_error (_("Invalid ABI accepted: %s."),
9550 arm_abi_string);
9551
9552 arm_update_current_architecture ();
9553 }
9554
9555 static void
9556 arm_show_abi (struct ui_file *file, int from_tty,
9557 struct cmd_list_element *c, const char *value)
9558 {
9559 if (arm_abi_global == ARM_ABI_AUTO
9560 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9561 {
9562 arm_gdbarch_tdep *tdep
9563 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9564
9565 gdb_printf (file, _("\
9566 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9567 arm_abi_strings[tdep->arm_abi]);
9568 }
9569 else
9570 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9571 arm_abi_string);
9572 }
9573
9574 static void
9575 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9576 struct cmd_list_element *c, const char *value)
9577 {
9578 gdb_printf (file,
9579 _("The current execution mode assumed "
9580 "(when symbols are unavailable) is \"%s\".\n"),
9581 arm_fallback_mode_string);
9582 }
9583
9584 static void
9585 arm_show_force_mode (struct ui_file *file, int from_tty,
9586 struct cmd_list_element *c, const char *value)
9587 {
9588 gdb_printf (file,
9589 _("The current execution mode assumed "
9590 "(even when symbols are available) is \"%s\".\n"),
9591 arm_force_mode_string);
9592 }
9593
9594 static void
9595 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9596 struct cmd_list_element *c, const char *value)
9597 {
9598 gdb_printf (file,
9599 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9600 arm_unwind_secure_frames ? "on" : "off");
9601 }
9602
9603 /* If the user changes the register disassembly style used for "info
9604 registers" and other commands, we also have to switch the style used
9605 in opcodes for disassembly output. This function is run by the "set
9606 arm disassembly" command and performs that switch. */
9607
9608 static void
9609 set_disassembly_style_sfunc (const char *args, int from_tty,
9610 struct cmd_list_element *c)
9611 {
9612 /* Convert the short style name into the long style name (eg, reg-names-*)
9613 before calling the generic set_disassembler_options() function. */
9614 std::string long_name = std::string ("reg-names-") + disassembly_style;
9615 set_disassembler_options (&long_name[0]);
9616 }
9617
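/* Show the current disassembly style by extracting the "reg-names-*"
   option from the current disassembler options.  */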
9618 static void
9619 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9620 struct cmd_list_element *c, const char *value)
9621 {
9622 struct gdbarch *gdbarch = get_current_arch ();
9623 char *options = get_disassembler_options (gdbarch);
9624 const char *style = "";
9625 int len = 0;
9626 const char *opt;
9627
9628 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9629 if (startswith (opt, "reg-names-"))
9630 {
9631 style = &opt[strlen ("reg-names-")];
9632 len = strcspn (style, ",");
9633 }
9634
9635 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9636 }
9637 \f
9638 /* Return the ARM register name corresponding to register I. */
9639 static const char *
9640 arm_register_name (struct gdbarch *gdbarch, int i)
9641 {
9642 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9643
9644 if (is_s_pseudo (gdbarch, i))
9645 {
9646 static const char *const s_pseudo_names[] = {
9647 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9648 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9649 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9650 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9651 };
9652
9653 return s_pseudo_names[i - tdep->s_pseudo_base];
9654 }
9655
9656 if (is_q_pseudo (gdbarch, i))
9657 {
9658 static const char *const q_pseudo_names[] = {
9659 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9660 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9661 };
9662
9663 return q_pseudo_names[i - tdep->q_pseudo_base];
9664 }
9665
9666 if (is_mve_pseudo (gdbarch, i))
9667 return "p0";
9668
9669 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9670 if (is_pacbti_pseudo (gdbarch, i))
9671 return "";
9672
9673 if (i >= ARRAY_SIZE (arm_register_names))
9674 /* These registers are only supported on targets which supply
9675 an XML description. */
9676 return "";
9677
9678 /* Non-pseudo registers. */
9679 return arm_register_names[i];
9680 }
9681
9682 /* Test whether the coff symbol specific value corresponds to a Thumb
9683 function. */
9684
9685 static int
9686 coff_sym_is_thumb (int val)
9687 {
9688 return (val == C_THUMBEXT
9689 || val == C_THUMBSTAT
9690 || val == C_THUMBEXTFUNC
9691 || val == C_THUMBSTATFUNC
9692 || val == C_THUMBLABEL);
9693 }
9694
9695 /* arm_coff_make_msymbol_special()
9696 arm_elf_make_msymbol_special()
9697
9698 These functions test whether the COFF or ELF symbol corresponds to
9699 an address in thumb code, and set a "special" bit in a minimal
9700 symbol to indicate that it does. */
9701
9702 static void
9703 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9704 {
9705 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9706
9707 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9708 == ST_BRANCH_TO_THUMB)
9709 MSYMBOL_SET_SPECIAL (msym);
9710 }
9711
9712 static void
9713 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9714 {
9715 if (coff_sym_is_thumb (val))
9716 MSYMBOL_SET_SPECIAL (msym);
9717 }
9718
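/* Implement the record_special_symbol gdbarch method: remember the ARM
   mapping symbols ($a, $t, $d) of OBJFILE so they can later be used to
   distinguish ARM, Thumb and data regions.  */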
9719 static void
9720 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9721 asymbol *sym)
9722 {
9723 const char *name = bfd_asymbol_name (sym);
9724 struct arm_per_bfd *data;
9725 struct arm_mapping_symbol new_map_sym;
9726
9727 gdb_assert (name[0] == '$');
9728 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9729 return;
9730
9731 data = arm_bfd_data_key.get (objfile->obfd.get ());
9732 if (data == NULL)
9733 data = arm_bfd_data_key.emplace (objfile->obfd.get (),
9734 objfile->obfd->section_count);
9735 arm_mapping_symbol_vec &map
9736 = data->section_maps[bfd_asymbol_section (sym)->index];
9737
9738 new_map_sym.value = sym->value;
9739 new_map_sym.type = name[1];
9740
9741 /* Insert at the end, the vector will be sorted on first use. */
9742 map.push_back (new_map_sym);
9743 }
9744
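/* Implement the write_pc gdbarch method.  Besides storing the new PC,
   keep the CPSR Thumb (T) bit consistent with the interworking state of
   the destination address.  */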
9745 static void
9746 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9747 {
9748 struct gdbarch *gdbarch = regcache->arch ();
9749 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9750
9751 /* If necessary, set the T bit. */
9752 if (arm_apcs_32)
9753 {
9754 ULONGEST val, t_bit;
9755 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9756 t_bit = arm_psr_thumb_bit (gdbarch);
9757 if (arm_pc_is_thumb (gdbarch, pc))
9758 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9759 val | t_bit);
9760 else
9761 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9762 val & ~t_bit);
9763 }
9764 }
9765
9766 /* Read the contents of a NEON quad register, by reading from two
9767 double registers. This is used to implement the quad pseudo
9768 registers, and for argument passing in case the quad registers are
9769 missing; vectors are passed in quad registers when using the VFP
9770 ABI, even if a NEON unit is not present. REGNUM is the index of
9771 the quad register, in [0, 15]. */
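/* For example, Q1 is read from D2 (least significant half) and D3 (most
   significant half); on a big-endian target the two halves appear in the
   opposite order within BUF.  */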
9772
9773 static enum register_status
9774 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9775 int regnum, gdb_byte *buf)
9776 {
9777 char name_buf[4];
9778 gdb_byte reg_buf[8];
9779 int offset, double_regnum;
9780 enum register_status status;
9781
9782 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9783 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9784 strlen (name_buf));
9785
9786 /* d0 is always the least significant half of q0. */
9787 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9788 offset = 8;
9789 else
9790 offset = 0;
9791
9792 status = regcache->raw_read (double_regnum, reg_buf);
9793 if (status != REG_VALID)
9794 return status;
9795 memcpy (buf + offset, reg_buf, 8);
9796
9797 offset = 8 - offset;
9798 status = regcache->raw_read (double_regnum + 1, reg_buf);
9799 if (status != REG_VALID)
9800 return status;
9801 memcpy (buf + offset, reg_buf, 8);
9802
9803 return REG_VALID;
9804 }
9805
9806 /* Read the contents of the MVE pseudo register REGNUM and store it
9807 in BUF. */
9808
9809 static enum register_status
9810 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9811 int regnum, gdb_byte *buf)
9812 {
9813 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9814
9815 /* P0 is the first 16 bits of VPR. */
9816 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9817 }
9818
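/* Read pseudo register REGNUM, dispatching to the quad (Q), MVE (P0) or
   single-precision (S) handler as appropriate.  */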
9819 static enum register_status
9820 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9821 int regnum, gdb_byte *buf)
9822 {
9823 const int num_regs = gdbarch_num_regs (gdbarch);
9824 char name_buf[4];
9825 gdb_byte reg_buf[8];
9826 int offset, double_regnum;
9827 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9828
9829 gdb_assert (regnum >= num_regs);
9830
9831 if (is_q_pseudo (gdbarch, regnum))
9832 {
9833 /* Quad-precision register. */
9834 return arm_neon_quad_read (gdbarch, regcache,
9835 regnum - tdep->q_pseudo_base, buf);
9836 }
9837 else if (is_mve_pseudo (gdbarch, regnum))
9838 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9839 else
9840 {
9841 enum register_status status;
9842
9843 regnum -= tdep->s_pseudo_base;
9844 /* Single-precision register. */
9845 gdb_assert (regnum < 32);
9846
9847 /* s0 is always the least significant half of d0. */
9848 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9849 offset = (regnum & 1) ? 0 : 4;
9850 else
9851 offset = (regnum & 1) ? 4 : 0;
9852
9853 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9854 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9855 strlen (name_buf));
9856
9857 status = regcache->raw_read (double_regnum, reg_buf);
9858 if (status == REG_VALID)
9859 memcpy (buf, reg_buf + offset, 4);
9860 return status;
9861 }
9862 }
9863
9864 /* Store the contents of BUF to a NEON quad register, by writing to
9865 two double registers. This is used to implement the quad pseudo
9866 registers, and for argument passing in case the quad registers are
9867 missing; vectors are passed in quad registers when using the VFP
9868 ABI, even if a NEON unit is not present. REGNUM is the index
9869 of the quad register, in [0, 15]. */
9870
9871 static void
9872 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9873 int regnum, const gdb_byte *buf)
9874 {
9875 char name_buf[4];
9876 int offset, double_regnum;
9877
9878 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9879 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9880 strlen (name_buf));
9881
9882 /* d0 is always the least significant half of q0. */
9883 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9884 offset = 8;
9885 else
9886 offset = 0;
9887
9888 regcache->raw_write (double_regnum, buf + offset);
9889 offset = 8 - offset;
9890 regcache->raw_write (double_regnum + 1, buf + offset);
9891 }
9892
9893 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9894
9895 static void
9896 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9897 int regnum, const gdb_byte *buf)
9898 {
9899 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9900
9901 /* P0 is the first 16 bits of VPR. */
9902 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9903 }
9904
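/* Write pseudo register REGNUM, mirroring arm_pseudo_read above.  */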
9905 static void
9906 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9907 int regnum, const gdb_byte *buf)
9908 {
9909 const int num_regs = gdbarch_num_regs (gdbarch);
9910 char name_buf[4];
9911 gdb_byte reg_buf[8];
9912 int offset, double_regnum;
9913 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9914
9915 gdb_assert (regnum >= num_regs);
9916
9917 if (is_q_pseudo (gdbarch, regnum))
9918 {
9919 /* Quad-precision register. */
9920 arm_neon_quad_write (gdbarch, regcache,
9921 regnum - tdep->q_pseudo_base, buf);
9922 }
9923 else if (is_mve_pseudo (gdbarch, regnum))
9924 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9925 else
9926 {
9927 regnum -= tdep->s_pseudo_base;
9928 /* Single-precision register. */
9929 gdb_assert (regnum < 32);
9930
9931 /* s0 is always the least significant half of d0. */
9932 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9933 offset = (regnum & 1) ? 0 : 4;
9934 else
9935 offset = (regnum & 1) ? 4 : 0;
9936
9937 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9938 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9939 strlen (name_buf));
9940
9941 regcache->raw_read (double_regnum, reg_buf);
9942 memcpy (reg_buf + offset, buf, 4);
9943 regcache->raw_write (double_regnum, reg_buf);
9944 }
9945 }
9946
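/* Return the value in FRAME of the register whose number BATON points
   at; used as a user-register read callback.  */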
9947 static struct value *
9948 value_of_arm_user_reg (frame_info_ptr frame, const void *baton)
9949 {
9950 const int *reg_p = (const int *) baton;
9951 return value_of_register (*reg_p, frame);
9952 }
9953 \f
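/* Sniff the OS ABI of an ARM ELF file from its header and, for
   ELFOSABI_ARM objects, from its note sections as well.  */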
9954 static enum gdb_osabi
9955 arm_elf_osabi_sniffer (bfd *abfd)
9956 {
9957 unsigned int elfosabi;
9958 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9959
9960 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9961
9962 if (elfosabi == ELFOSABI_ARM)
9963 /* GNU tools use this value. Check note sections in this case,
9964 as well. */
9965 {
9966 for (asection *sect : gdb_bfd_sections (abfd))
9967 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9968 }
9969
9970 /* Anything else will be handled by the generic ELF sniffer. */
9971 return osabi;
9972 }
9973
9974 static int
9975 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9976 const struct reggroup *group)
9977 {
9978 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
9979 that, the FPS register belongs to save_reggroup, restore_reggroup, and
9980 all_reggroup, of course. */
9981 if (regnum == ARM_FPS_REGNUM)
9982 return (group == float_reggroup
9983 || group == save_reggroup
9984 || group == restore_reggroup
9985 || group == all_reggroup);
9986 else
9987 return default_register_reggroup_p (gdbarch, regnum, group);
9988 }
9989
9990 /* For backward-compatibility we allow two 'g' packet lengths with
9991 the remote protocol depending on whether FPA registers are
9992 supplied. M-profile targets do not have FPA registers, but some
9993 stubs already exist in the wild which use a 'g' packet which
9994 supplies them albeit with dummy values. The packet format which
9995 includes FPA registers should be considered deprecated for
9996 M-profile targets. */
9997
9998 static void
9999 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
10000 {
10001 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10002
10003 if (tdep->is_m)
10004 {
10005 const target_desc *tdesc;
10006
10007 /* If we know from the executable this is an M-profile target,
10008 cater for remote targets whose register set layout is the
10009 same as the FPA layout. */
10010 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
10011 register_remote_g_packet_guess (gdbarch,
10012 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
10013 tdesc);
10014
10015 /* The regular M-profile layout. */
10016 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
10017 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
10018 tdesc);
10019
10020 /* M-profile plus M4F VFP. */
10021 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
10022 register_remote_g_packet_guess (gdbarch,
10023 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
10024 tdesc);
10025 /* M-profile plus MVE. */
10026 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
10027 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
10028 + ARM_VFP2_REGS_SIZE
10029 + ARM_INT_REGISTER_SIZE, tdesc);
10030
10031 /* M-profile system (stack pointers). */
10032 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
10033 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
10034 }
10035
10036 /* Otherwise we don't have a useful guess. */
10037 }
10038
10039 /* Implement the code_of_frame_writable gdbarch method. */
10040
10041 static int
10042 arm_code_of_frame_writable (struct gdbarch *gdbarch, frame_info_ptr frame)
10043 {
10044 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10045
10046 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
10047 {
10048 /* M-profile exception frames return to some magic PCs, which
10049 aren't writable at all. */
10050 return 0;
10051 }
10052 else
10053 return 1;
10054 }
10055
10056 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
10057 to be postfixed by a version (eg armv7hl). */
10058
10059 static const char *
10060 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
10061 {
10062 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
10063 return "arm(v[^- ]*)?";
10064 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
10065 }
10066
10067 /* Implement the "get_pc_address_flags" gdbarch method. */
10068
10069 static std::string
10070 arm_get_pc_address_flags (frame_info_ptr frame, CORE_ADDR pc)
10071 {
10072 if (get_frame_pc_masked (frame))
10073 return "PAC";
10074
10075 return "";
10076 }
10077
10078 /* Initialize the current architecture based on INFO. If possible,
10079 re-use an architecture from ARCHES, which is a list of
10080 architectures already created during this debugging session.
10081
10082 Called e.g. at program startup, when reading a core file, and when
10083 reading a binary file. */
10084
10085 static struct gdbarch *
10086 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
10087 {
10088 struct gdbarch_list *best_arch;
10089 enum arm_abi_kind arm_abi = arm_abi_global;
10090 enum arm_float_model fp_model = arm_fp_model;
10091 tdesc_arch_data_up tdesc_data;
10092 int i;
10093 bool is_m = false;
10094 bool have_sec_ext = false;
10095 int vfp_register_count = 0;
10096 bool have_s_pseudos = false, have_q_pseudos = false;
10097 bool have_wmmx_registers = false;
10098 bool have_neon = false;
10099 bool have_fpa_registers = true;
10100 const struct target_desc *tdesc = info.target_desc;
10101 bool have_vfp = false;
10102 bool have_mve = false;
10103 bool have_pacbti = false;
10104 int mve_vpr_regnum = -1;
10105 int register_count = ARM_NUM_REGS;
10106 bool have_m_profile_msp = false;
10107 int m_profile_msp_regnum = -1;
10108 int m_profile_psp_regnum = -1;
10109 int m_profile_msp_ns_regnum = -1;
10110 int m_profile_psp_ns_regnum = -1;
10111 int m_profile_msp_s_regnum = -1;
10112 int m_profile_psp_s_regnum = -1;
10113 int tls_regnum = 0;
10114
10115 /* If we have an object to base this architecture on, try to determine
10116 its ABI. */
10117
10118 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
10119 {
10120 int ei_osabi, e_flags;
10121
10122 switch (bfd_get_flavour (info.abfd))
10123 {
10124 case bfd_target_coff_flavour:
10125 /* Assume it's an old APCS-style ABI. */
10126 /* XXX WinCE? */
10127 arm_abi = ARM_ABI_APCS;
10128 break;
10129
10130 case bfd_target_elf_flavour:
10131 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
10132 e_flags = elf_elfheader (info.abfd)->e_flags;
10133
10134 if (ei_osabi == ELFOSABI_ARM)
10135 {
10136 /* GNU tools used to use this value, but do not for EABI
10137 objects. There's nowhere to tag an EABI version
10138 anyway, so assume APCS. */
10139 arm_abi = ARM_ABI_APCS;
10140 }
10141 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10142 {
10143 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10144
10145 switch (eabi_ver)
10146 {
10147 case EF_ARM_EABI_UNKNOWN:
10148 /* Assume GNU tools. */
10149 arm_abi = ARM_ABI_APCS;
10150 break;
10151
10152 case EF_ARM_EABI_VER4:
10153 case EF_ARM_EABI_VER5:
10154 arm_abi = ARM_ABI_AAPCS;
10155 /* EABI binaries default to VFP float ordering.
10156 They may also contain build attributes that can
10157 be used to identify if the VFP argument-passing
10158 ABI is in use. */
10159 if (fp_model == ARM_FLOAT_AUTO)
10160 {
10161 #ifdef HAVE_ELF
10162 switch (bfd_elf_get_obj_attr_int (info.abfd,
10163 OBJ_ATTR_PROC,
10164 Tag_ABI_VFP_args))
10165 {
10166 case AEABI_VFP_args_base:
10167 /* "The user intended FP parameter/result
10168 passing to conform to AAPCS, base
10169 variant". */
10170 fp_model = ARM_FLOAT_SOFT_VFP;
10171 break;
10172 case AEABI_VFP_args_vfp:
10173 /* "The user intended FP parameter/result
10174 passing to conform to AAPCS, VFP
10175 variant". */
10176 fp_model = ARM_FLOAT_VFP;
10177 break;
10178 case AEABI_VFP_args_toolchain:
10179 /* "The user intended FP parameter/result
10180 passing to conform to tool chain-specific
10181 conventions" - we don't know any such
10182 conventions, so leave it as "auto". */
10183 break;
10184 case AEABI_VFP_args_compatible:
10185 /* "Code is compatible with both the base
10186 and VFP variants; the user did not permit
10187 non-variadic functions to pass FP
10188 parameters/results" - leave it as
10189 "auto". */
10190 break;
10191 default:
10192 /* Attribute value not mentioned in the
10193 November 2012 ABI, so leave it as
10194 "auto". */
10195 break;
10196 }
10197 #else
10198 fp_model = ARM_FLOAT_SOFT_VFP;
10199 #endif
10200 }
10201 break;
10202
10203 default:
10204 /* Leave it as "auto". */
10205 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10206 break;
10207 }
10208
10209 #ifdef HAVE_ELF
10210 /* Detect M-profile programs. This only works if the
10211 executable file includes build attributes; GCC does
10212 copy them to the executable, but e.g. RealView does
10213 not. */
10214 int attr_arch
10215 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10216 Tag_CPU_arch);
10217 int attr_profile
10218 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10219 Tag_CPU_arch_profile);
10220
10221 /* GCC specifies the profile for v6-M; RealView only
10222 specifies the profile for architectures starting with
10223 V7 (as opposed to architectures with a tag
10224 numerically greater than TAG_CPU_ARCH_V7). */
10225 if (!tdesc_has_registers (tdesc)
10226 && (attr_arch == TAG_CPU_ARCH_V6_M
10227 || attr_arch == TAG_CPU_ARCH_V6S_M
10228 || attr_arch == TAG_CPU_ARCH_V7E_M
10229 || attr_arch == TAG_CPU_ARCH_V8M_BASE
10230 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
10231 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
10232 || attr_profile == 'M'))
10233 is_m = true;
10234
10235 /* Look for attributes that indicate support for ARMv8.1-m
10236 PACBTI. */
10237 if (!tdesc_has_registers (tdesc) && is_m)
10238 {
10239 int attr_pac_extension
10240 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10241 Tag_PAC_extension);
10242
10243 int attr_bti_extension
10244 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10245 Tag_BTI_extension);
10246
10247 int attr_pacret_use
10248 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10249 Tag_PACRET_use);
10250
10251 int attr_bti_use
10252 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10253 Tag_BTI_use);
10254
10255 if (attr_pac_extension != 0 || attr_bti_extension != 0
10256 || attr_pacret_use != 0 || attr_bti_use != 0)
10257 have_pacbti = true;
10258 }
10259 #endif
10260 }
10261
10262 if (fp_model == ARM_FLOAT_AUTO)
10263 {
10264 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10265 {
10266 case 0:
10267 /* Leave it as "auto". Strictly speaking this case
10268 means FPA, but almost nobody uses that now, and
10269 many toolchains fail to set the appropriate bits
10270 for the floating-point model they use. */
10271 break;
10272 case EF_ARM_SOFT_FLOAT:
10273 fp_model = ARM_FLOAT_SOFT_FPA;
10274 break;
10275 case EF_ARM_VFP_FLOAT:
10276 fp_model = ARM_FLOAT_VFP;
10277 break;
10278 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10279 fp_model = ARM_FLOAT_SOFT_VFP;
10280 break;
10281 }
10282 }
10283
10284 if (e_flags & EF_ARM_BE8)
10285 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10286
10287 break;
10288
10289 default:
10290 /* Leave it as "auto". */
10291 break;
10292 }
10293 }
10294
10295 /* Check any target description for validity. */
10296 if (tdesc_has_registers (tdesc))
10297 {
10298 /* For most registers we require GDB's default names; but also allow
10299 the numeric names for sp / lr / pc, as a convenience. */
10300 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10301 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10302 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10303
10304 const struct tdesc_feature *feature;
10305 int valid_p;
10306
10307 feature = tdesc_find_feature (tdesc,
10308 "org.gnu.gdb.arm.core");
10309 if (feature == NULL)
10310 {
10311 feature = tdesc_find_feature (tdesc,
10312 "org.gnu.gdb.arm.m-profile");
10313 if (feature == NULL)
10314 return NULL;
10315 else
10316 is_m = true;
10317 }
10318
10319 tdesc_data = tdesc_data_alloc ();
10320
10321 valid_p = 1;
10322 for (i = 0; i < ARM_SP_REGNUM; i++)
10323 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10324 arm_register_names[i]);
10325 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10326 ARM_SP_REGNUM,
10327 arm_sp_names);
10328 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10329 ARM_LR_REGNUM,
10330 arm_lr_names);
10331 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10332 ARM_PC_REGNUM,
10333 arm_pc_names);
10334 if (is_m)
10335 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10336 ARM_PS_REGNUM, "xpsr");
10337 else
10338 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10339 ARM_PS_REGNUM, "cpsr");
10340
10341 if (!valid_p)
10342 return NULL;
10343
10344 if (is_m)
10345 {
10346 feature = tdesc_find_feature (tdesc,
10347 "org.gnu.gdb.arm.m-system");
10348 if (feature != nullptr)
10349 {
10350 /* MSP */
10351 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10352 register_count, "msp");
10353 if (!valid_p)
10354 {
10355 warning (_("M-profile m-system feature is missing required register msp."));
10356 return nullptr;
10357 }
10358 have_m_profile_msp = true;
10359 m_profile_msp_regnum = register_count++;
10360
10361 /* PSP */
10362 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10363 register_count, "psp");
10364 if (!valid_p)
10365 {
10366 warning (_("M-profile m-system feature is missing required register psp."));
10367 return nullptr;
10368 }
10369 m_profile_psp_regnum = register_count++;
10370 }
10371 }
10372
10373 feature = tdesc_find_feature (tdesc,
10374 "org.gnu.gdb.arm.fpa");
10375 if (feature != NULL)
10376 {
10377 valid_p = 1;
10378 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10379 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10380 arm_register_names[i]);
10381 if (!valid_p)
10382 return NULL;
10383 }
10384 else
10385 have_fpa_registers = false;
10386
10387 feature = tdesc_find_feature (tdesc,
10388 "org.gnu.gdb.xscale.iwmmxt");
10389 if (feature != NULL)
10390 {
10391 static const char *const iwmmxt_names[] = {
10392 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10393 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10394 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10395 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10396 };
10397
10398 valid_p = 1;
10399 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10400 valid_p
10401 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10402 iwmmxt_names[i - ARM_WR0_REGNUM]);
10403
10404 /* Check for the control registers, but do not fail if they
10405 are missing. */
10406 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10407 tdesc_numbered_register (feature, tdesc_data.get (), i,
10408 iwmmxt_names[i - ARM_WR0_REGNUM]);
10409
10410 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10411 valid_p
10412 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10413 iwmmxt_names[i - ARM_WR0_REGNUM]);
10414
10415 if (!valid_p)
10416 return NULL;
10417
10418 have_wmmx_registers = true;
10419 }
10420
10421 /* If we have a VFP unit, check whether the single precision registers
10422 are present. If not, then we will synthesize them as pseudo
10423 registers. */
10424 feature = tdesc_find_feature (tdesc,
10425 "org.gnu.gdb.arm.vfp");
10426 if (feature != NULL)
10427 {
10428 static const char *const vfp_double_names[] = {
10429 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10430 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10431 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10432 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10433 };
10434
10435 /* Require the double precision registers. There must be either
10436 16 or 32. */
10437 valid_p = 1;
10438 for (i = 0; i < 32; i++)
10439 {
10440 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10441 ARM_D0_REGNUM + i,
10442 vfp_double_names[i]);
10443 if (!valid_p)
10444 break;
10445 }
10446 if (!valid_p && i == 16)
10447 valid_p = 1;
10448
10449 /* Also require FPSCR. */
10450 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10451 ARM_FPSCR_REGNUM, "fpscr");
10452 if (!valid_p)
10453 return NULL;
10454
10455 have_vfp = true;
10456
10457 if (tdesc_unnumbered_register (feature, "s0") == 0)
10458 have_s_pseudos = true;
10459
10460 vfp_register_count = i;
10461
10462 /* If we have VFP, also check for NEON. The architecture allows
10463 NEON without VFP (integer vector operations only), but GDB
10464 does not support that. */
10465 feature = tdesc_find_feature (tdesc,
10466 "org.gnu.gdb.arm.neon");
10467 if (feature != NULL)
10468 {
10469 /* NEON requires 32 double-precision registers. */
10470 if (i != 32)
10471 return NULL;
10472
10473 /* If there are quad registers defined by the stub, use
10474 their type; otherwise (normally) provide them with
10475 the default type. */
10476 if (tdesc_unnumbered_register (feature, "q0") == 0)
10477 have_q_pseudos = true;
10478 }
10479 }
10480
10481 /* Check for the TLS register feature. */
10482 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10483 if (feature != nullptr)
10484 {
10485 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10486 register_count, "tpidruro");
10487 if (!valid_p)
10488 return nullptr;
10489
10490 tls_regnum = register_count;
10491 register_count++;
10492 }
10493
10494 /* Check for MVE after all the checks for GPRs, VFP and NEON.
10495 MVE (Helium) is an M-profile extension. */
10496 if (is_m)
10497 {
10498 /* Do we have the MVE feature? */
10499 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
10500
10501 if (feature != nullptr)
10502 {
10503 /* If we have MVE, we must always have the VPR register. */
10504 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10505 register_count, "vpr");
10506 if (!valid_p)
10507 {
10508 warning (_("MVE feature is missing required register vpr."));
10509 return nullptr;
10510 }
10511
10512 have_mve = true;
10513 mve_vpr_regnum = register_count;
10514 register_count++;
10515
10516 /* We can't have Q pseudo registers available here, as that
10517 would mean we have NEON features, and that is only available
10518 on A and R profiles. */
10519 gdb_assert (!have_q_pseudos);
10520
10521 /* Given we have a M-profile target description, if MVE is
10522 enabled and there are VFP registers, we should have Q
10523 pseudo registers (Q0 ~ Q7). */
10524 if (have_vfp)
10525 have_q_pseudos = true;
10526 }
10527
10528 /* Do we have the ARMv8.1-m PACBTI feature? */
10529 feature = tdesc_find_feature (tdesc,
10530 "org.gnu.gdb.arm.m-profile-pacbti");
10531 if (feature != nullptr)
10532 {
10533 /* By advertising this feature, the target acknowledges the
10534 presence of the ARMv8.1-m PACBTI extensions.
10535
10536 We don't care for any particular registers in this group, so
10537 the target is free to include whatever it deems appropriate.
10538
10539 The expectation is for this feature to include the PAC
10540 keys. */
10541 have_pacbti = true;
10542 }
10543
10544 /* Do we have the Security extension? */
10545 feature = tdesc_find_feature (tdesc,
10546 "org.gnu.gdb.arm.secext");
10547 if (feature != nullptr)
10548 {
10549 /* Secure/Non-secure stack pointers. */
10550 /* MSP_NS */
10551 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10552 register_count, "msp_ns");
10553 if (!valid_p)
10554 {
10555 warning (_("M-profile secext feature is missing required register msp_ns."));
10556 return nullptr;
10557 }
10558 m_profile_msp_ns_regnum = register_count++;
10559
10560 /* PSP_NS */
10561 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10562 register_count, "psp_ns");
10563 if (!valid_p)
10564 {
10565 warning (_("M-profile secext feature is missing required register psp_ns."));
10566 return nullptr;
10567 }
10568 m_profile_psp_ns_regnum = register_count++;
10569
10570 /* MSP_S */
10571 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10572 register_count, "msp_s");
10573 if (!valid_p)
10574 {
10575 warning (_("M-profile secext feature is missing required register msp_s."));
10576 return nullptr;
10577 }
10578 m_profile_msp_s_regnum = register_count++;
10579
10580 /* PSP_S */
10581 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10582 register_count, "psp_s");
10583 if (!valid_p)
10584 {
10585 warning (_("M-profile secext feature is missing required register psp_s."));
10586 return nullptr;
10587 }
10588 m_profile_psp_s_regnum = register_count++;
10589
10590 have_sec_ext = true;
10591 }
10592
10593 }
10594 }
10595
10596 /* If there is already a candidate, use it. */
10597 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10598 best_arch != NULL;
10599 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10600 {
10601 arm_gdbarch_tdep *tdep
10602 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch);
10603
10604 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10605 continue;
10606
10607 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10608 continue;
10609
10610 /* There are various other properties in tdep that we do not
10611 need to check here: those derived from a target description,
10612 since gdbarches with a different target description are
10613 automatically disqualified. */
10614
10615 /* Do check is_m, though, since it might come from the binary. */
10616 if (is_m != tdep->is_m)
10617 continue;
10618
10619 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10620 the binary. */
10621 if (have_pacbti != tdep->have_pacbti)
10622 continue;
10623
10624 /* Found a match. */
10625 break;
10626 }
10627
10628 if (best_arch != NULL)
10629 return best_arch->gdbarch;
10630
10631 gdbarch *gdbarch
10632 = gdbarch_alloc (&info, gdbarch_tdep_up (new arm_gdbarch_tdep));
10633 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10634
10635 /* Record additional information about the architecture we are defining.
10636 These are gdbarch discriminators, like the OSABI. */
10637 tdep->arm_abi = arm_abi;
10638 tdep->fp_model = fp_model;
10639 tdep->is_m = is_m;
10640 tdep->have_sec_ext = have_sec_ext;
10641 tdep->have_fpa_registers = have_fpa_registers;
10642 tdep->have_wmmx_registers = have_wmmx_registers;
10643 gdb_assert (vfp_register_count == 0
10644 || vfp_register_count == 16
10645 || vfp_register_count == 32);
10646 tdep->vfp_register_count = vfp_register_count;
10647 tdep->have_s_pseudos = have_s_pseudos;
10648 tdep->have_q_pseudos = have_q_pseudos;
10649 tdep->have_neon = have_neon;
10650 tdep->tls_regnum = tls_regnum;
10651
10652 /* Adjust the MVE feature settings. */
10653 if (have_mve)
10654 {
10655 tdep->have_mve = true;
10656 tdep->mve_vpr_regnum = mve_vpr_regnum;
10657 }
10658
10659 /* Adjust the PACBTI feature settings. */
10660 tdep->have_pacbti = have_pacbti;
10661
10662 /* Adjust the M-profile stack pointers settings. */
10663 if (have_m_profile_msp)
10664 {
10665 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10666 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10667 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10668 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10669 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10670 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10671 }
10672
10673 arm_register_g_packet_guesses (gdbarch);
10674
10675 /* Breakpoints. */
10676 switch (info.byte_order_for_code)
10677 {
10678 case BFD_ENDIAN_BIG:
10679 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10680 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10681 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10682 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10683
10684 break;
10685
10686 case BFD_ENDIAN_LITTLE:
10687 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10688 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10689 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10690 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10691
10692 break;
10693
10694 default:
10695 internal_error (_("arm_gdbarch_init: bad byte order for float format"));
10696 }
10697
10698 /* On ARM targets char defaults to unsigned. */
10699 set_gdbarch_char_signed (gdbarch, 0);
10700
10701 /* wchar_t is unsigned under the AAPCS. */
10702 if (tdep->arm_abi == ARM_ABI_AAPCS)
10703 set_gdbarch_wchar_signed (gdbarch, 0);
10704 else
10705 set_gdbarch_wchar_signed (gdbarch, 1);
10706
10707 /* Compute type alignment. */
10708 set_gdbarch_type_align (gdbarch, arm_type_align);
10709
10710 /* Note: for displaced stepping, this includes the breakpoint, and one word
10711 of additional scratch space. This setting isn't used for anything besides
10712 displaced stepping at present. */
10713 set_gdbarch_displaced_step_buffer_length
10714 (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10715 set_gdbarch_max_insn_length (gdbarch, 4);
10716
10717 /* This should be low enough for everything. */
10718 tdep->lowest_pc = 0x20;
10719 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10720
10721 /* The default, for both APCS and AAPCS, is to return small
10722 structures in registers. */
10723 tdep->struct_return = reg_struct_return;
10724
10725 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10726 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10727
10728 if (is_m)
10729 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10730
10731 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10732
10733 frame_base_set_default (gdbarch, &arm_normal_base);
10734
10735 /* Address manipulation. */
10736 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10737
10738 /* Advance PC across function entry code. */
10739 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10740
10741 /* Detect whether PC is at a point where the stack has been destroyed. */
10742 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10743
10744 /* Skip trampolines. */
10745 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10746
10747 /* The stack grows downward. */
10748 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10749
10750 /* Breakpoint manipulation. */
10751 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10752 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10753 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10754 arm_breakpoint_kind_from_current_state);
10755
10756 /* Information about registers, etc. */
10757 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10758 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10759 set_gdbarch_num_regs (gdbarch, register_count);
10760 set_gdbarch_register_type (gdbarch, arm_register_type);
10761 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10762
10763 /* This "info float" is FPA-specific. Use the generic version if we
10764 do not have FPA. */
10765 if (tdep->have_fpa_registers)
10766 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10767
10768 /* Internal <-> external register number maps. */
10769 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10770 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10771
10772 set_gdbarch_register_name (gdbarch, arm_register_name);
10773
10774 /* Returning results. */
10775 set_gdbarch_return_value_as_value (gdbarch, arm_return_value);
10776
10777 /* Disassembly. */
10778 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10779
10780 /* Minsymbol frobbing. */
10781 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10782 set_gdbarch_coff_make_msymbol_special (gdbarch,
10783 arm_coff_make_msymbol_special);
10784 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10785
10786 /* Thumb-2 IT block support. */
10787 set_gdbarch_adjust_breakpoint_address (gdbarch,
10788 arm_adjust_breakpoint_address);
10789
10790 /* Virtual tables. */
10791 set_gdbarch_vbit_in_delta (gdbarch, 1);
10792
10793 /* Hook in the ABI-specific overrides, if they have been registered. */
10794 gdbarch_init_osabi (info, gdbarch);
10795
10796 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10797
10798 /* Add some default predicates. */
10799 if (is_m)
10800 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10801 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10802 dwarf2_append_unwinders (gdbarch);
10803 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10804 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10805 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10806
10807 /* Now that we have tuned the configuration, set a few final things,
10808 based on what the OS ABI has told us. */
10809
10810 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10811 binaries are always marked. */
10812 if (tdep->arm_abi == ARM_ABI_AUTO)
10813 tdep->arm_abi = ARM_ABI_APCS;
10814
10815 /* Watchpoints are not steppable. */
10816 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10817
10818 /* We used to default to FPA for generic ARM, but almost nobody
10819 uses that now, and we now provide a way for the user to force
10820 the model. So default to the most useful variant. */
10821 if (tdep->fp_model == ARM_FLOAT_AUTO)
10822 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10823
10824 if (tdep->jb_pc >= 0)
10825 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10826
10827 /* Floating point sizes and format. */
10828 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10829 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10830 {
10831 set_gdbarch_double_format
10832 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10833 set_gdbarch_long_double_format
10834 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10835 }
10836 else
10837 {
10838 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10839 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10840 }
10841
10842 /* Hook used to decorate frames with signed return addresses, only available
10843 for ARMv8.1-m PACBTI. */
10844 if (is_m && have_pacbti)
10845 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10846
10847 if (tdesc_data != nullptr)
10848 {
10849 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10850
10851 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10852 register_count = gdbarch_num_regs (gdbarch);
10853
10854 /* Override tdesc_register_type to adjust the types of VFP
10855 registers for NEON. */
10856 set_gdbarch_register_type (gdbarch, arm_register_type);
10857 }
10858
10859 /* Initialize the pseudo register data. */
10860 int num_pseudos = 0;
10861 if (tdep->have_s_pseudos)
10862 {
10863 /* VFP single precision pseudo registers (S0~S31). */
10864 tdep->s_pseudo_base = register_count;
10865 tdep->s_pseudo_count = 32;
10866 num_pseudos += tdep->s_pseudo_count;
10867
10868 if (tdep->have_q_pseudos)
10869 {
10870 /* NEON quad precision pseudo registers (Q0~Q15). */
10871 tdep->q_pseudo_base = register_count + num_pseudos;
10872
10873 if (have_neon)
10874 tdep->q_pseudo_count = 16;
10875 else if (have_mve)
10876 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10877
10878 num_pseudos += tdep->q_pseudo_count;
10879 }
10880 }
10881
10882 /* Do we have any MVE pseudo registers? */
10883 if (have_mve)
10884 {
10885 tdep->mve_pseudo_base = register_count + num_pseudos;
10886 tdep->mve_pseudo_count = 1;
10887 num_pseudos += tdep->mve_pseudo_count;
10888 }
10889
10890 /* Do we have any ARMv8.1-m PACBTI pseudo registers? */
10891 if (have_pacbti)
10892 {
10893 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10894 tdep->pacbti_pseudo_count = 1;
10895 num_pseudos += tdep->pacbti_pseudo_count;
10896 }
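/* For illustration (a summary of the assignments above, not additional
configuration): the pseudo registers, when present, live directly after the
raw registers in the order S0-S31, then the Q pseudos, then the MVE pseudo,
then the PACBTI pseudo, with each *_pseudo_base above recording where its
group starts. */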
10897
10898 /* Set some pseudo register hooks, if we have pseudo registers. */
10899 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10900 {
10901 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10902 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10903 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10904 }
10905
10906 /* Add standard register aliases. We add aliases even for those
10907 names which are used by the current architecture - it's simpler,
10908 and does no harm, since nothing ever lists user registers. */
10909 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10910 user_reg_add (gdbarch, arm_register_aliases[i].name,
10911 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10912
10913 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10914 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10915
10916 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10917
10918 return gdbarch;
10919 }
10920
10921 static void
10922 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10923 {
10924 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10925
10926 if (tdep == NULL)
10927 return;
10928
10929 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10930 (int) tdep->fp_model);
10931 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10932 (int) tdep->have_fpa_registers);
10933 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10934 (int) tdep->have_wmmx_registers);
10935 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10936 (int) tdep->vfp_register_count);
10937 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10938 tdep->have_s_pseudos ? "true" : "false");
10939 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10940 (int) tdep->s_pseudo_base);
10941 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10942 (int) tdep->s_pseudo_count);
10943 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10944 tdep->have_q_pseudos ? "true" : "false");
10945 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10946 (int) tdep->q_pseudo_base);
10947 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10948 (int) tdep->q_pseudo_count);
10949 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10950 (int) tdep->have_neon);
10951 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10952 tdep->have_mve ? "yes" : "no");
10953 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10954 tdep->mve_vpr_regnum);
10955 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10956 tdep->mve_pseudo_base);
10957 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10958 tdep->mve_pseudo_count);
10959 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10960 tdep->m_profile_msp_regnum);
10961 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10962 tdep->m_profile_psp_regnum);
10963 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10964 tdep->m_profile_msp_ns_regnum);
10965 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10966 tdep->m_profile_psp_ns_regnum);
10967 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10968 tdep->m_profile_msp_s_regnum);
10969 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10970 tdep->m_profile_psp_s_regnum);
10971 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10972 (unsigned long) tdep->lowest_pc);
10973 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10974 tdep->have_pacbti ? "yes" : "no");
10975 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10976 tdep->pacbti_pseudo_base);
10977 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10978 tdep->pacbti_pseudo_count);
10979 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10980 tdep->is_m ? "yes" : "no");
10981 }
10982
10983 #if GDB_SELF_TEST
10984 namespace selftests
10985 {
10986 static void arm_record_test (void);
10987 static void arm_analyze_prologue_test ();
10988 }
10989 #endif
10990
10991 void _initialize_arm_tdep ();
10992 void
10993 _initialize_arm_tdep ()
10994 {
10995 long length;
10996 int i, j;
10997 char regdesc[1024], *rdptr = regdesc;
10998 size_t rest = sizeof (regdesc);
10999
11000 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
11001
11002 /* Add ourselves to objfile event chain. */
11003 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
11004
11005 /* Register an ELF OS ABI sniffer for ARM binaries. */
11006 gdbarch_register_osabi_sniffer (bfd_arch_arm,
11007 bfd_target_elf_flavour,
11008 arm_elf_osabi_sniffer);
11009
11010 /* Add root prefix command for all "set arm"/"show arm" commands. */
11011 add_setshow_prefix_cmd ("arm", no_class,
11012 _("Various ARM-specific commands."),
11013 _("Various ARM-specific commands."),
11014 &setarmcmdlist, &showarmcmdlist,
11015 &setlist, &showlist);
11016
11017 arm_disassembler_options = xstrdup ("reg-names-std");
11018 const disasm_options_t *disasm_options
11019 = &disassembler_options_arm ()->options;
11020 int num_disassembly_styles = 0;
11021 for (i = 0; disasm_options->name[i] != NULL; i++)
11022 if (startswith (disasm_options->name[i], "reg-names-"))
11023 num_disassembly_styles++;
11024
11025 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
11026 valid_disassembly_styles = XNEWVEC (const char *,
11027 num_disassembly_styles + 1);
11028 for (i = j = 0; disasm_options->name[i] != NULL; i++)
11029 if (startswith (disasm_options->name[i], "reg-names-"))
11030 {
11031 size_t offset = strlen ("reg-names-");
11032 const char *style = disasm_options->name[i];
11033 valid_disassembly_styles[j++] = &style[offset];
11034 if (strcmp (&style[offset], "std") == 0)
11035 disassembly_style = &style[offset];
11036 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
11037 disasm_options->description[i]);
11038 rdptr += length;
11039 rest -= length;
11040 }
11041 /* Mark the end of valid options. */
11042 valid_disassembly_styles[num_disassembly_styles] = NULL;
11043
11044 /* Create the help text. */
11045 std::string helptext = string_printf ("%s%s%s",
11046 _("The valid values are:\n"),
11047 regdesc,
11048 _("The default is \"std\"."));
11049
11050 add_setshow_enum_cmd ("disassembler", no_class,
11051 valid_disassembly_styles, &disassembly_style,
11052 _("Set the disassembly style."),
11053 _("Show the disassembly style."),
11054 helptext.c_str (),
11055 set_disassembly_style_sfunc,
11056 show_disassembly_style_sfunc,
11057 &setarmcmdlist, &showarmcmdlist);
11058
11059 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
11060 _("Set usage of ARM 32-bit mode."),
11061 _("Show usage of ARM 32-bit mode."),
11062 _("When off, a 26-bit PC will be used."),
11063 NULL,
11064 NULL, /* FIXME: i18n: Usage of ARM 32-bit
11065 mode is %s. */
11066 &setarmcmdlist, &showarmcmdlist);
11067
11068 /* Add a command to allow the user to force the FPU model. */
11069 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
11070 _("Set the floating point type."),
11071 _("Show the floating point type."),
11072 _("auto - Determine the FP typefrom the OS-ABI.\n\
11073 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
11074 fpa - FPA co-processor (GCC compiled).\n\
11075 softvfp - Software FP with pure-endian doubles.\n\
11076 vfp - VFP co-processor."),
11077 set_fp_model_sfunc, show_fp_model,
11078 &setarmcmdlist, &showarmcmdlist);
11079
11080 /* Add a command to allow the user to force the ABI. */
11081 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
11082 _("Set the ABI."),
11083 _("Show the ABI."),
11084 NULL, arm_set_abi, arm_show_abi,
11085 &setarmcmdlist, &showarmcmdlist);
11086
11087 /* Add two commands to allow the user to force the assumed
11088 execution mode. */
11089 add_setshow_enum_cmd ("fallback-mode", class_support,
11090 arm_mode_strings, &arm_fallback_mode_string,
11091 _("Set the mode assumed when symbols are unavailable."),
11092 _("Show the mode assumed when symbols are unavailable."),
11093 NULL, NULL, arm_show_fallback_mode,
11094 &setarmcmdlist, &showarmcmdlist);
11095 add_setshow_enum_cmd ("force-mode", class_support,
11096 arm_mode_strings, &arm_force_mode_string,
11097 _("Set the mode assumed even when symbols are available."),
11098 _("Show the mode assumed even when symbols are available."),
11099 NULL, NULL, arm_show_force_mode,
11100 &setarmcmdlist, &showarmcmdlist);
11101
11102 /* Add a command to stop triggering security exceptions when
11103 unwinding exception stacks. */
11104 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
11105 _("Set usage of non-secure to secure exception stack unwinding."),
11106 _("Show usage of non-secure to secure exception stack unwinding."),
11107 _("When on, the debugger can trigger memory access traps."),
11108 NULL, arm_show_unwind_secure_frames,
11109 &setarmcmdlist, &showarmcmdlist);
11110
11111 /* Debugging flag. */
11112 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
11113 _("Set ARM debugging."),
11114 _("Show ARM debugging."),
11115 _("When on, arm-specific debugging is enabled."),
11116 NULL,
11117 NULL, /* FIXME: i18n: "ARM debugging is %s." */
11118 &setdebuglist, &showdebuglist);
11119
11120 #if GDB_SELF_TEST
11121 selftests::register_test ("arm-record", selftests::arm_record_test);
11122 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
11123 #endif
11124
11125 }
11126
11127 /* ARM-reversible process record data structures. */
11128
11129 #define ARM_INSN_SIZE_BYTES 4
11130 #define THUMB_INSN_SIZE_BYTES 2
11131 #define THUMB2_INSN_SIZE_BYTES 4
11132
11133
11134 /* Position of the bit within a 32-bit ARM instruction
11135 that defines whether the instruction is a load or store. */
11136 #define INSN_S_L_BIT_NUM 20
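/* For example, in the single data transfer encodings handled below,
bit (insn, INSN_S_L_BIT_NUM) is 1 for loads (LDR and friends) and 0 for
stores. */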
11137
11138 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
11139 do \
11140 { \
11141 unsigned int reg_len = LENGTH; \
11142 if (reg_len) \
11143 { \
11144 REGS = XNEWVEC (uint32_t, reg_len); \
11145 memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
11146 } \
11147 } \
11148 while (0)
11149
11150 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
11151 do \
11152 { \
11153 unsigned int mem_len = LENGTH; \
11154 if (mem_len) \
11155 { \
11156 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
11157 memcpy (&MEMS->len, &RECORD_BUF[0], \
11158 sizeof (struct arm_mem_r) * LENGTH); \
11159 } \
11160 } \
11161 while (0)
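/* A typical decode helper below stages its results in local record_buf /
record_buf_mem arrays and then hands them over with, e.g.:

REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

This is only an illustration; the actual calls appear in the record
functions further down. */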
11162
11163 /* True if the insn has already been recorded; false if it is yet to be decoded (boolean expression). */
11164 #define INSN_RECORDED(ARM_RECORD) \
11165 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
11166
11167 /* ARM memory record structure. */
11168 struct arm_mem_r
11169 {
11170 uint32_t len; /* Record length. */
11171 uint32_t addr; /* Memory address. */
11172 };
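/* For example, a 4-byte store to address ADDR is staged by the decode
routines below as the pair { 4, ADDR } (length first, then address),
matching the field order of this struct. */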
11173
11174 /* An ARM instruction record holds the opcode and execution state of the
11175 current insn (filled in before entry to decode_insn ()), and the lists
11176 of to-be-modified registers and memory blocks (filled in on return
11177 from decode_insn ()). */
11178
11179 struct arm_insn_decode_record
11180 {
11181 struct gdbarch *gdbarch;
11182 struct regcache *regcache;
11183 CORE_ADDR this_addr; /* Address of the insn being decoded. */
11184 uint32_t arm_insn; /* Should accommodate thumb. */
11185 uint32_t cond; /* Condition code. */
11186 uint32_t opcode; /* Insn opcode. */
11187 uint32_t decode; /* Insn decode bits. */
11188 uint32_t mem_rec_count; /* No of mem records. */
11189 uint32_t reg_rec_count; /* No of reg records. */
11190 uint32_t *arm_regs; /* Registers to be saved for this record. */
11191 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
11192 };
11193
11194
11195 /* Checks ARM SBZ and SBO mandatory fields (the LEN bits starting at 1-based bit BIT_NUM of INSN). */
11196
11197 static int
11198 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
11199 {
11200 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
11201
11202 if (!len)
11203 return 1;
11204
11205 if (!sbo)
11206 ones = ~ones;
11207
11208 while (ones)
11209 {
11210 if (!(ones & sbo))
11211 {
11212 return 0;
11213 }
11214 ones = ones >> 1;
11215 }
11216 return 1;
11217 }
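/* For example, the call sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1) used
further down checks the should-be-one field occupying bits 12-15 of the
insn (the 4 bits starting at 1-based bit 13). */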
11218
11219 enum arm_record_result
11220 {
11221 ARM_RECORD_SUCCESS = 0,
11222 ARM_RECORD_FAILURE = 1
11223 };
11224
11225 enum arm_record_strx_t
11226 {
11227 ARM_RECORD_STRH=1,
11228 ARM_RECORD_STRD
11229 };
11230
11231 enum record_type_t
11232 {
11233 ARM_RECORD=1,
11234 THUMB_RECORD,
11235 THUMB2_RECORD
11236 };
11237
11238
11239 static int
11240 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
11241 uint32_t *record_buf_mem, arm_record_strx_t str_type)
11242 {
11243
11244 struct regcache *reg_cache = arm_insn_r->regcache;
11245 ULONGEST u_regval[2]= {0};
11246
11247 uint32_t reg_src1 = 0, reg_src2 = 0;
11248 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11249
11250 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11251 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11252
11253 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11254 {
11255 /* 1) Handle misc store, immediate offset. */
11256 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11257 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11258 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11259 regcache_raw_read_unsigned (reg_cache, reg_src1,
11260 &u_regval[0]);
11261 if (ARM_PC_REGNUM == reg_src1)
11262 {
11263 /* If R15 was used as Rn, the value read is the current PC + 8. */
11264 u_regval[0] = u_regval[0] + 8;
11265 }
11266 offset_8 = (immed_high << 4) | immed_low;
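/* The 8-bit immediate offset is split across the insn: bits 8-11 hold the
high nibble and bits 0-3 the low nibble. */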
11267 /* Calculate target store address. */
11268 if (14 == arm_insn_r->opcode)
11269 {
11270 tgt_mem_addr = u_regval[0] + offset_8;
11271 }
11272 else
11273 {
11274 tgt_mem_addr = u_regval[0] - offset_8;
11275 }
11276 if (ARM_RECORD_STRH == str_type)
11277 {
11278 record_buf_mem[0] = 2;
11279 record_buf_mem[1] = tgt_mem_addr;
11280 arm_insn_r->mem_rec_count = 1;
11281 }
11282 else if (ARM_RECORD_STRD == str_type)
11283 {
11284 record_buf_mem[0] = 4;
11285 record_buf_mem[1] = tgt_mem_addr;
11286 record_buf_mem[2] = 4;
11287 record_buf_mem[3] = tgt_mem_addr + 4;
11288 arm_insn_r->mem_rec_count = 2;
11289 }
11290 }
11291 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
11292 {
11293 /* 2) Store, register offset. */
11294 /* Get Rm. */
11295 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11296 /* Get Rn. */
11297 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11298 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11299 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11300 if (15 == reg_src2)
11301 {
11302 /* If R15 was used as Rn, the value read is the current PC + 8. */
11303 u_regval[1] = u_regval[1] + 8;
11304 }
11305 /* Calculate target store address, Rn +/- Rm, register offset. */
11306 if (12 == arm_insn_r->opcode)
11307 {
11308 tgt_mem_addr = u_regval[0] + u_regval[1];
11309 }
11310 else
11311 {
11312 tgt_mem_addr = u_regval[1] - u_regval[0];
11313 }
11314 if (ARM_RECORD_STRH == str_type)
11315 {
11316 record_buf_mem[0] = 2;
11317 record_buf_mem[1] = tgt_mem_addr;
11318 arm_insn_r->mem_rec_count = 1;
11319 }
11320 else if (ARM_RECORD_STRD == str_type)
11321 {
11322 record_buf_mem[0] = 4;
11323 record_buf_mem[1] = tgt_mem_addr;
11324 record_buf_mem[2] = 4;
11325 record_buf_mem[3] = tgt_mem_addr + 4;
11326 arm_insn_r->mem_rec_count = 2;
11327 }
11328 }
11329 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11330 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11331 {
11332 /* 3) Store, immediate pre-indexed. */
11333 /* 5) Store, immediate post-indexed. */
11334 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11335 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11336 offset_8 = (immed_high << 4) | immed_low;
11337 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11338 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11339 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
11340 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11341 {
11342 tgt_mem_addr = u_regval[0] + offset_8;
11343 }
11344 else
11345 {
11346 tgt_mem_addr = u_regval[0] - offset_8;
11347 }
11348 if (ARM_RECORD_STRH == str_type)
11349 {
11350 record_buf_mem[0] = 2;
11351 record_buf_mem[1] = tgt_mem_addr;
11352 arm_insn_r->mem_rec_count = 1;
11353 }
11354 else if (ARM_RECORD_STRD == str_type)
11355 {
11356 record_buf_mem[0] = 4;
11357 record_buf_mem[1] = tgt_mem_addr;
11358 record_buf_mem[2] = 4;
11359 record_buf_mem[3] = tgt_mem_addr + 4;
11360 arm_insn_r->mem_rec_count = 2;
11361 }
11362 /* Record Rn also as it changes. */
11363 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11364 arm_insn_r->reg_rec_count = 1;
11365 }
11366 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11367 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11368 {
11369 /* 4) Store, register pre-indexed. */
11370 /* 6) Store, register post-indexed. */
11371 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11372 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11373 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11374 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11375 /* Calculate target store address, Rn +/- Rm, register offset. */
11376 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11377 {
11378 tgt_mem_addr = u_regval[0] + u_regval[1];
11379 }
11380 else
11381 {
11382 tgt_mem_addr = u_regval[1] - u_regval[0];
11383 }
11384 if (ARM_RECORD_STRH == str_type)
11385 {
11386 record_buf_mem[0] = 2;
11387 record_buf_mem[1] = tgt_mem_addr;
11388 arm_insn_r->mem_rec_count = 1;
11389 }
11390 else if (ARM_RECORD_STRD == str_type)
11391 {
11392 record_buf_mem[0] = 4;
11393 record_buf_mem[1] = tgt_mem_addr;
11394 record_buf_mem[2] = 4;
11395 record_buf_mem[3] = tgt_mem_addr + 4;
11396 arm_insn_r->mem_rec_count = 2;
11397 }
11398 /* Record Rn also as it changes. */
11399 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11400 arm_insn_r->reg_rec_count = 1;
11401 }
11402 return 0;
11403 }
11404
11405 /* Handling ARM extension space insns. */
11406
11407 static int
11408 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11409 {
11410 int ret = 0; /* Return value: -1: record failure; 0: success. */
11411 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11412 uint32_t record_buf[8], record_buf_mem[8];
11413 uint32_t reg_src1 = 0;
11414 struct regcache *reg_cache = arm_insn_r->regcache;
11415 ULONGEST u_regval = 0;
11416
11417 gdb_assert (!INSN_RECORDED(arm_insn_r));
11418 /* Handle unconditional insn extension space. */
11419
11420 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11421 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11422 if (arm_insn_r->cond)
11423 {
11424 /* PLD has no effect on architectural state; it just affects
11425 the caches. */
11426 if (5 == ((opcode1 & 0xE0) >> 5))
11427 {
11428 /* BLX(1) */
11429 record_buf[0] = ARM_PS_REGNUM;
11430 record_buf[1] = ARM_LR_REGNUM;
11431 arm_insn_r->reg_rec_count = 2;
11432 }
11433 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11434 }
11435
11436
11437 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11438 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11439 {
11440 ret = -1;
11441 /* Undefined instruction on ARM V5; need to handle if later
11442 versions define it. */
11443 }
11444
11445 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11446 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11447 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11448
11449 /* Handle arithmetic insn extension space. */
11450 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11451 && !INSN_RECORDED(arm_insn_r))
11452 {
11453 /* Handle MLA(S) and MUL(S). */
11454 if (in_inclusive_range (insn_op1, 0U, 3U))
11455 {
11456 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11457 record_buf[1] = ARM_PS_REGNUM;
11458 arm_insn_r->reg_rec_count = 2;
11459 }
11460 else if (in_inclusive_range (insn_op1, 4U, 15U))
11461 {
11462 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11463 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11464 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11465 record_buf[2] = ARM_PS_REGNUM;
11466 arm_insn_r->reg_rec_count = 3;
11467 }
11468 }
11469
11470 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11471 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11472 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11473
11474 /* Handle control insn extension space. */
11475
11476 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11477 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11478 {
11479 if (!bit (arm_insn_r->arm_insn,25))
11480 {
11481 if (!bits (arm_insn_r->arm_insn, 4, 7))
11482 {
11483 if ((0 == insn_op1) || (2 == insn_op1))
11484 {
11485 /* MRS. */
11486 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11487 arm_insn_r->reg_rec_count = 1;
11488 }
11489 else if (1 == insn_op1)
11490 {
11491 /* CPSR is going to be changed. */
11492 record_buf[0] = ARM_PS_REGNUM;
11493 arm_insn_r->reg_rec_count = 1;
11494 }
11495 else if (3 == insn_op1)
11496 {
11497 /* SPSR is going to be changed. */
11498 /* We need to get SPSR value, which is yet to be done. */
11499 return -1;
11500 }
11501 }
11502 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11503 {
11504 if (1 == insn_op1)
11505 {
11506 /* BX. */
11507 record_buf[0] = ARM_PS_REGNUM;
11508 arm_insn_r->reg_rec_count = 1;
11509 }
11510 else if (3 == insn_op1)
11511 {
11512 /* CLZ. */
11513 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11514 arm_insn_r->reg_rec_count = 1;
11515 }
11516 }
11517 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11518 {
11519 /* BLX. */
11520 record_buf[0] = ARM_PS_REGNUM;
11521 record_buf[1] = ARM_LR_REGNUM;
11522 arm_insn_r->reg_rec_count = 2;
11523 }
11524 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11525 {
11526 /* QADD, QSUB, QDADD, QDSUB */
11527 record_buf[0] = ARM_PS_REGNUM;
11528 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11529 arm_insn_r->reg_rec_count = 2;
11530 }
11531 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11532 {
11533 /* BKPT. */
11534 record_buf[0] = ARM_PS_REGNUM;
11535 record_buf[1] = ARM_LR_REGNUM;
11536 arm_insn_r->reg_rec_count = 2;
11537
11538 /* Save SPSR also; how? */
11539 return -1;
11540 }
11541 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11542 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11543 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11544 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11545 )
11546 {
11547 if (0 == insn_op1 || 1 == insn_op1)
11548 {
11549 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11550 /* We don't optimize the SMULW<y> case, where only Rd
11551 would be needed. */
11552 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11553 record_buf[1] = ARM_PS_REGNUM;
11554 arm_insn_r->reg_rec_count = 2;
11555 }
11556 else if (2 == insn_op1)
11557 {
11558 /* SMLAL<x><y>. */
11559 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11560 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11561 arm_insn_r->reg_rec_count = 2;
11562 }
11563 else if (3 == insn_op1)
11564 {
11565 /* SMUL<x><y>. */
11566 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11567 arm_insn_r->reg_rec_count = 1;
11568 }
11569 }
11570 }
11571 else
11572 {
11573 /* MSR : immediate form. */
11574 if (1 == insn_op1)
11575 {
11576 /* CPSR is going to be changed. */
11577 record_buf[0] = ARM_PS_REGNUM;
11578 arm_insn_r->reg_rec_count = 1;
11579 }
11580 else if (3 == insn_op1)
11581 {
11582 /* SPSR is going to be changed. */
11583 /* We need to get SPSR value, which is yet to be done. */
11584 return -1;
11585 }
11586 }
11587 }
11588
11589 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11590 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11591 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11592
11593 /* Handle load/store insn extension space. */
11594
11595 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11596 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11597 && !INSN_RECORDED(arm_insn_r))
11598 {
11599 /* SWP/SWPB. */
11600 if (0 == insn_op1)
11601 {
11602 /* These insns change a register and memory as well. */
11603 /* SWP or SWPB insn. */
11604 /* Get memory address given by Rn. */
11605 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11606 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11607 /* Is this the SWP insn?  It swaps a word. */
11608 if (8 == arm_insn_r->opcode)
11609 {
11610 record_buf_mem[0] = 4;
11611 }
11612 else
11613 {
11614 /* SWPB insn swaps only a byte. */
11615 record_buf_mem[0] = 1;
11616 }
11617 record_buf_mem[1] = u_regval;
11618 arm_insn_r->mem_rec_count = 1;
11619 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11620 arm_insn_r->reg_rec_count = 1;
11621 }
11622 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11623 {
11624 /* STRH. */
11625 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11626 ARM_RECORD_STRH);
11627 }
11628 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11629 {
11630 /* LDRD. */
11631 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11632 record_buf[1] = record_buf[0] + 1;
11633 arm_insn_r->reg_rec_count = 2;
11634 }
11635 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11636 {
11637 /* STRD. */
11638 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11639 ARM_RECORD_STRD);
11640 }
11641 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11642 {
11643 /* LDRH, LDRSB, LDRSH. */
11644 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11645 arm_insn_r->reg_rec_count = 1;
11646 }
11647
11648 }
11649
11650 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11651 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11652 && !INSN_RECORDED(arm_insn_r))
11653 {
11654 ret = -1;
11655 /* Handle coprocessor insn extension space. */
11656 }
11657
11658 /* To be done for ARMv5 and later; as of now we return -1. */
11659 if (-1 == ret)
11660 return ret;
11661
11662 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11663 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11664
11665 return ret;
11666 }
11667
11668 /* Handling opcode 000 insns. */
11669
11670 static int
11671 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11672 {
11673 struct regcache *reg_cache = arm_insn_r->regcache;
11674 uint32_t record_buf[8], record_buf_mem[8];
11675 ULONGEST u_regval[2] = {0};
11676
11677 uint32_t reg_src1 = 0;
11678 uint32_t opcode1 = 0;
11679
11680 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11681 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11682 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11683
11684 if (!((opcode1 & 0x19) == 0x10))
11685 {
11686 /* Data-processing (register) and data-processing (register-shifted
11687 register). */
11688 /* In all of the 11 shifter operand modes, the insn modifies the
11689 destination register, which is specified by bits 12-15. */
11690 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11691 record_buf[1] = ARM_PS_REGNUM;
11692 arm_insn_r->reg_rec_count = 2;
11693 }
11694 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11695 {
11696 /* Miscellaneous instructions */
11697
11698 if (3 == arm_insn_r->decode && 0x12 == opcode1
11699 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11700 {
11701 /* Handle BLX, branch and link/exchange. */
11702 if (9 == arm_insn_r->opcode)
11703 {
11704 /* The T bit of CPSR is set from bit[0] of Rm to select the
11705 instruction set, and R14 stores the return address. */
11706 record_buf[0] = ARM_PS_REGNUM;
11707 record_buf[1] = ARM_LR_REGNUM;
11708 arm_insn_r->reg_rec_count = 2;
11709 }
11710 }
11711 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11712 {
11713 /* Handle enhanced software breakpoint insn, BKPT. */
11714 /* CPSR is changed so execution continues in ARM state, with normal
11715 interrupts disabled, entering abort mode. */
11716 /* The PC is set according to the high vector configuration. */
11717 /* If the user hits the breakpoint and then types reverse, we
11718 need to go back with the previous CPSR and
11719 Program Counter. */
11720 record_buf[0] = ARM_PS_REGNUM;
11721 record_buf[1] = ARM_LR_REGNUM;
11722 arm_insn_r->reg_rec_count = 2;
11723
11724 /* Save SPSR also; how? */
11725 return -1;
11726 }
11727 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11728 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11729 {
11730 /* Handle BX, branch and exchange. */
11731 /* The T bit of CPSR is set from bit[0] of Rm to select the instruction set. */
11732 record_buf[0] = ARM_PS_REGNUM;
11733 arm_insn_r->reg_rec_count = 1;
11734 }
11735 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11736 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11737 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11738 {
11739 /* Count leading zeros: CLZ. */
11740 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11741 arm_insn_r->reg_rec_count = 1;
11742 }
11743 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11744 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11745 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11746 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11747 {
11748 /* Handle MRS insn. */
11749 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11750 arm_insn_r->reg_rec_count = 1;
11751 }
11752 }
11753 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11754 {
11755 /* Multiply and multiply-accumulate */
11756
11757 /* Handle multiply instructions. */
11758 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11759 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11760 {
11761 /* Handle MLA and MUL. */
11762 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11763 record_buf[1] = ARM_PS_REGNUM;
11764 arm_insn_r->reg_rec_count = 2;
11765 }
11766 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11767 {
11768 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11769 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11770 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11771 record_buf[2] = ARM_PS_REGNUM;
11772 arm_insn_r->reg_rec_count = 3;
11773 }
11774 }
11775 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11776 {
11777 /* Synchronization primitives */
11778
11779 /* Handling SWP, SWPB. */
11780 /* These insns change a register and memory as well. */
11781 /* SWP or SWPB insn. */
11782
11783 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11784 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11785 /* Is this the SWP insn?  It swaps a word. */
11786 if (8 == arm_insn_r->opcode)
11787 {
11788 record_buf_mem[0] = 4;
11789 }
11790 else
11791 {
11792 /* SWPB insn swaps only a byte. */
11793 record_buf_mem[0] = 1;
11794 }
11795 record_buf_mem[1] = u_regval[0];
11796 arm_insn_r->mem_rec_count = 1;
11797 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11798 arm_insn_r->reg_rec_count = 1;
11799 }
11800 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11801 || 15 == arm_insn_r->decode)
11802 {
11803 if ((opcode1 & 0x12) == 2)
11804 {
11805 /* Extra load/store (unprivileged) */
11806 return -1;
11807 }
11808 else
11809 {
11810 /* Extra load/store */
11811 switch (bits (arm_insn_r->arm_insn, 5, 6))
11812 {
11813 case 1:
11814 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11815 {
11816 /* STRH (register), STRH (immediate) */
11817 arm_record_strx (arm_insn_r, &record_buf[0],
11818 &record_buf_mem[0], ARM_RECORD_STRH);
11819 }
11820 else if ((opcode1 & 0x05) == 0x1)
11821 {
11822 /* LDRH (register) */
11823 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11824 arm_insn_r->reg_rec_count = 1;
11825
11826 if (bit (arm_insn_r->arm_insn, 21))
11827 {
11828 /* Write back to Rn. */
11829 record_buf[arm_insn_r->reg_rec_count++]
11830 = bits (arm_insn_r->arm_insn, 16, 19);
11831 }
11832 }
11833 else if ((opcode1 & 0x05) == 0x5)
11834 {
11835 /* LDRH (immediate), LDRH (literal) */
11836 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11837
11838 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11839 arm_insn_r->reg_rec_count = 1;
11840
11841 if (rn != 15)
11842 {
11843 /* LDRH (immediate) */
11844 if (bit (arm_insn_r->arm_insn, 21))
11845 {
11846 /* Write back to Rn. */
11847 record_buf[arm_insn_r->reg_rec_count++] = rn;
11848 }
11849 }
11850 }
11851 else
11852 return -1;
11853 break;
11854 case 2:
11855 if ((opcode1 & 0x05) == 0x0)
11856 {
11857 /* LDRD (register) */
11858 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11859 record_buf[1] = record_buf[0] + 1;
11860 arm_insn_r->reg_rec_count = 2;
11861
11862 if (bit (arm_insn_r->arm_insn, 21))
11863 {
11864 /* Write back to Rn. */
11865 record_buf[arm_insn_r->reg_rec_count++]
11866 = bits (arm_insn_r->arm_insn, 16, 19);
11867 }
11868 }
11869 else if ((opcode1 & 0x05) == 0x1)
11870 {
11871 /* LDRSB (register) */
11872 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11873 arm_insn_r->reg_rec_count = 1;
11874
11875 if (bit (arm_insn_r->arm_insn, 21))
11876 {
11877 /* Write back to Rn. */
11878 record_buf[arm_insn_r->reg_rec_count++]
11879 = bits (arm_insn_r->arm_insn, 16, 19);
11880 }
11881 }
11882 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11883 {
11884 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11885 LDRSB (literal) */
11886 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11887
11888 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11889 arm_insn_r->reg_rec_count = 1;
11890
11891 if (rn != 15)
11892 {
11893 /* LDRD (immediate), LDRSB (immediate) */
11894 if (bit (arm_insn_r->arm_insn, 21))
11895 {
11896 /* Write back to Rn. */
11897 record_buf[arm_insn_r->reg_rec_count++] = rn;
11898 }
11899 }
11900 }
11901 else
11902 return -1;
11903 break;
11904 case 3:
11905 if ((opcode1 & 0x05) == 0x0)
11906 {
11907 /* STRD (register) */
11908 arm_record_strx (arm_insn_r, &record_buf[0],
11909 &record_buf_mem[0], ARM_RECORD_STRD);
11910 }
11911 else if ((opcode1 & 0x05) == 0x1)
11912 {
11913 /* LDRSH (register) */
11914 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11915 arm_insn_r->reg_rec_count = 1;
11916
11917 if (bit (arm_insn_r->arm_insn, 21))
11918 {
11919 /* Write back to Rn. */
11920 record_buf[arm_insn_r->reg_rec_count++]
11921 = bits (arm_insn_r->arm_insn, 16, 19);
11922 }
11923 }
11924 else if ((opcode1 & 0x05) == 0x4)
11925 {
11926 /* STRD (immediate) */
11927 arm_record_strx (arm_insn_r, &record_buf[0],
11928 &record_buf_mem[0], ARM_RECORD_STRD);
11929 }
11930 else if ((opcode1 & 0x05) == 0x5)
11931 {
11932 /* LDRSH (immediate), LDRSH (literal) */
11933 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11934 arm_insn_r->reg_rec_count = 1;
11935
11936 if (bit (arm_insn_r->arm_insn, 21))
11937 {
11938 /* Write back to Rn. */
11939 record_buf[arm_insn_r->reg_rec_count++]
11940 = bits (arm_insn_r->arm_insn, 16, 19);
11941 }
11942 }
11943 else
11944 return -1;
11945 break;
11946 default:
11947 return -1;
11948 }
11949 }
11950 }
11951 else
11952 {
11953 return -1;
11954 }
11955
11956 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11957 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11958 return 0;
11959 }
11960
11961 /* Handling opcode 001 insns. */
11962
11963 static int
11964 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11965 {
11966 uint32_t record_buf[8], record_buf_mem[8];
11967
11968 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11969 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11970
11971 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11972 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11973 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11974 )
11975 {
11976 /* Handle MSR insn. */
11977 if (9 == arm_insn_r->opcode)
11978 {
11979 /* CPSR is going to be changed. */
11980 record_buf[0] = ARM_PS_REGNUM;
11981 arm_insn_r->reg_rec_count = 1;
11982 }
11983 else
11984 {
11985 /* SPSR is going to be changed. */
11986 }
11987 }
11988 else if (arm_insn_r->opcode <= 15)
11989 {
11990 /* Normal data processing insns. */
11991 /* In all of the 11 shifter operand modes, the insn modifies the
11992 destination register, which is specified by bits 12-15. */
11993 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11994 record_buf[1] = ARM_PS_REGNUM;
11995 arm_insn_r->reg_rec_count = 2;
11996 }
11997 else
11998 {
11999 return -1;
12000 }
12001
12002 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12003 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12004 return 0;
12005 }
12006
12007 static int
12008 arm_record_media (arm_insn_decode_record *arm_insn_r)
12009 {
12010 uint32_t record_buf[8];
12011
12012 switch (bits (arm_insn_r->arm_insn, 22, 24))
12013 {
12014 case 0:
12015 /* Parallel addition and subtraction, signed */
12016 case 1:
12017 /* Parallel addition and subtraction, unsigned */
12018 case 2:
12019 case 3:
12020 /* Packing, unpacking, saturation and reversal */
12021 {
12022 int rd = bits (arm_insn_r->arm_insn, 12, 15);
12023
12024 record_buf[arm_insn_r->reg_rec_count++] = rd;
12025 }
12026 break;
12027
12028 case 4:
12029 case 5:
12030 /* Signed multiplies */
12031 {
12032 int rd = bits (arm_insn_r->arm_insn, 16, 19);
12033 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
12034
12035 record_buf[arm_insn_r->reg_rec_count++] = rd;
12036 if (op1 == 0x0)
12037 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12038 else if (op1 == 0x4)
12039 record_buf[arm_insn_r->reg_rec_count++]
12040 = bits (arm_insn_r->arm_insn, 12, 15);
12041 }
12042 break;
12043
12044 case 6:
12045 {
12046 if (bit (arm_insn_r->arm_insn, 21)
12047 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
12048 {
12049 /* SBFX */
12050 record_buf[arm_insn_r->reg_rec_count++]
12051 = bits (arm_insn_r->arm_insn, 12, 15);
12052 }
12053 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
12054 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
12055 {
12056 /* USAD8 and USADA8 */
12057 record_buf[arm_insn_r->reg_rec_count++]
12058 = bits (arm_insn_r->arm_insn, 16, 19);
12059 }
12060 }
12061 break;
12062
12063 case 7:
12064 {
12065 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
12066 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
12067 {
12068 /* Permanently UNDEFINED */
12069 return -1;
12070 }
12071 else
12072 {
12073 /* BFC, BFI and UBFX */
12074 record_buf[arm_insn_r->reg_rec_count++]
12075 = bits (arm_insn_r->arm_insn, 12, 15);
12076 }
12077 }
12078 break;
12079
12080 default:
12081 return -1;
12082 }
12083
12084 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12085
12086 return 0;
12087 }
12088
12089 /* Handle ARM mode instructions with opcode 010. */
12090
12091 static int
12092 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
12093 {
12094 struct regcache *reg_cache = arm_insn_r->regcache;
12095
12096 uint32_t reg_base, reg_dest;
12097 uint32_t offset_12, tgt_mem_addr;
12098 uint32_t record_buf[8], record_buf_mem[8];
12099 unsigned char wback;
12100 ULONGEST u_regval;
12101
12102 /* Calculate wback. */
12103 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
12104 || (bit (arm_insn_r->arm_insn, 21) == 1);
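/* That is, the base register is written back for post-indexed addressing
(P bit, bit 24, clear) and for pre-indexed addressing with writeback
(W bit, bit 21, set). */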
12105
12106 arm_insn_r->reg_rec_count = 0;
12107 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12108
12109 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12110 {
12111 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
12112 and LDRT. */
12113
12114 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12115 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
12116
12117 /* The LDR instruction is capable of doing branching. If MOV LR, PC
12118 precedes an LDR instruction having R15 as reg_base, it
12119 emulates a branch and link instruction, and hence we need to save
12120 CPSR and PC as well. */
12121 if (ARM_PC_REGNUM == reg_dest)
12122 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12123
12124 /* If wback is true, also save the base register, which is going to be
12125 written to. */
12126 if (wback)
12127 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12128 }
12129 else
12130 {
12131 /* STR (immediate), STRB (immediate), STRBT and STRT. */
12132
12133 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
12134 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12135
12136 /* Handle bit U. */
12137 if (bit (arm_insn_r->arm_insn, 23))
12138 {
12139 /* U == 1: Add the offset. */
12140 tgt_mem_addr = (uint32_t) u_regval + offset_12;
12141 }
12142 else
12143 {
12144 /* U == 0: subtract the offset. */
12145 tgt_mem_addr = (uint32_t) u_regval - offset_12;
12146 }
12147
12148 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
12149 bytes. */
12150 if (bit (arm_insn_r->arm_insn, 22))
12151 {
12152 /* STRB and STRBT: 1 byte. */
12153 record_buf_mem[0] = 1;
12154 }
12155 else
12156 {
12157 /* STR and STRT: 4 bytes. */
12158 record_buf_mem[0] = 4;
12159 }
12160
12161 /* Handle bit P. */
12162 if (bit (arm_insn_r->arm_insn, 24))
12163 record_buf_mem[1] = tgt_mem_addr;
12164 else
12165 record_buf_mem[1] = (uint32_t) u_regval;
12166
12167 arm_insn_r->mem_rec_count = 1;
12168
12169 /* If wback is true, also save the base register, which is going to be
12170 written to. */
12171 if (wback)
12172 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12173 }
12174
12175 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12176 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12177 return 0;
12178 }
12179
12180 /* Handling opcode 011 insns. */
12181
12182 static int
12183 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
12184 {
12185 struct regcache *reg_cache = arm_insn_r->regcache;
12186
12187 uint32_t shift_imm = 0;
12188 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
12189 uint32_t offset_12 = 0, tgt_mem_addr = 0;
12190 uint32_t record_buf[8], record_buf_mem[8];
12191
12192 LONGEST s_word;
12193 ULONGEST u_regval[2];
12194
12195 if (bit (arm_insn_r->arm_insn, 4))
12196 return arm_record_media (arm_insn_r);
12197
12198 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
12199 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
12200
12201 /* Handle enhanced store insns and the LDRD DSP insn; the order
12202 follows the addressing modes used by the store insns, starting
12203 with STRH. */
12204
12205 /* LDR or STR? */
12206 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12207 {
12208 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12209 /* The LDR insn is capable of doing branching.  If MOV LR, PC
12210 precedes an LDR insn having R15 as its destination,
12211 it emulates a branch and link insn, and hence we
12212 need to save CPSR and PC as well. */
12213 if (15 != reg_dest)
12214 {
12215 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12216 arm_insn_r->reg_rec_count = 1;
12217 }
12218 else
12219 {
12220 record_buf[0] = reg_dest;
12221 record_buf[1] = ARM_PS_REGNUM;
12222 arm_insn_r->reg_rec_count = 2;
12223 }
12224 }
12225 else
12226 {
12227 if (! bits (arm_insn_r->arm_insn, 4, 11))
12228 {
12229 /* Store insn, register offset and register pre-indexed,
12230 register post-indexed. */
12231 /* Get Rm. */
12232 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12233 /* Get Rn. */
12234 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12235 regcache_raw_read_unsigned (reg_cache, reg_src1,
12236 &u_regval[0]);
12237 regcache_raw_read_unsigned (reg_cache, reg_src2,
12238 &u_regval[1]);
12239 if (15 == reg_src2)
12240 {
12241 /* If R15 was used as Rn, the value read is the current PC + 8. */
12242 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
12243 u_regval[1] = u_regval[1] + 8;
12244 }
12245 /* Calculate target store address, Rn +/- Rm, register offset. */
12246 /* U == 1. */
12247 if (bit (arm_insn_r->arm_insn, 23))
12248 {
12249 tgt_mem_addr = u_regval[0] + u_regval[1];
12250 }
12251 else
12252 {
12253 tgt_mem_addr = u_regval[1] - u_regval[0];
12254 }
12255
12256 switch (arm_insn_r->opcode)
12257 {
12258 /* STR. */
12259 case 8:
12260 case 12:
12261 /* STR. */
12262 case 9:
12263 case 13:
12264 /* STRT. */
12265 case 1:
12266 case 5:
12267 /* STR. */
12268 case 0:
12269 case 4:
12270 record_buf_mem[0] = 4;
12271 break;
12272
12273 /* STRB. */
12274 case 10:
12275 case 14:
12276 /* STRB. */
12277 case 11:
12278 case 15:
12279 /* STRBT. */
12280 case 3:
12281 case 7:
12282 /* STRB. */
12283 case 2:
12284 case 6:
12285 record_buf_mem[0] = 1;
12286 break;
12287
12288 default:
12289 gdb_assert_not_reached ("no decoding pattern found");
12290 break;
12291 }
12292 record_buf_mem[1] = tgt_mem_addr;
12293 arm_insn_r->mem_rec_count = 1;
12294
12295 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12296 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12297 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12298 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12299 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12300 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12301 )
12302 {
12303 /* Rn is going to be changed in pre-indexed mode and
12304 post-indexed mode as well. */
12305 record_buf[0] = reg_src2;
12306 arm_insn_r->reg_rec_count = 1;
12307 }
12308 }
12309 else
12310 {
12311 /* Store insn, scaled register offset; scaled pre-indexed. */
12312 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
12313 /* Get Rm. */
12314 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12315 /* Get Rn. */
12316 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12317 /* Get shift_imm. */
12318 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
12319 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12320 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
12321 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12322 /* Offset_12 here holds the shift type from bits 5-6. */
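/* The two shift-type bits select 0 = LSL, 1 = LSR, 2 = ASR and
3 = ROR (or RRX when shift_imm is zero), matching the cases below. */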
12323 switch (offset_12)
12324 {
12325 case 0:
12326 /* Offset_12 used as index. */
12327 offset_12 = u_regval[0] << shift_imm;
12328 break;
12329
12330 case 1:
12331 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
12332 break;
12333
12334 case 2:
12335 if (!shift_imm)
12336 {
12337 if (bit (u_regval[0], 31))
12338 {
12339 offset_12 = 0xFFFFFFFF;
12340 }
12341 else
12342 {
12343 offset_12 = 0;
12344 }
12345 }
12346 else
12347 {
12348 /* This is an arithmetic shift (ASR). */
12349 offset_12 = s_word >> shift_imm;
12350 }
12351 break;
12352
12353 case 3:
12354 if (!shift_imm)
12355 {
12356 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12357 &u_regval[1]);
12358 /* Get C flag value and shift it by 31. */
12359 offset_12 = (((bit (u_regval[1], 29)) << 31) \
12360 | (u_regval[0]) >> 1);
12361 }
12362 else
12363 {
12364 offset_12 = (u_regval[0] >> shift_imm) \
12365 | (u_regval[0] <<
12366 (32 - shift_imm));
12367 }
12368 break;
12369
12370 default:
12371 gdb_assert_not_reached ("no decoding pattern found");
12372 break;
12373 }
12374
12375 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12376 /* bit U set. */
12377 if (bit (arm_insn_r->arm_insn, 23))
12378 {
12379 tgt_mem_addr = u_regval[1] + offset_12;
12380 }
12381 else
12382 {
12383 tgt_mem_addr = u_regval[1] - offset_12;
12384 }
12385
12386 switch (arm_insn_r->opcode)
12387 {
12388 /* STR. */
12389 case 8:
12390 case 12:
12391 /* STR. */
12392 case 9:
12393 case 13:
12394 /* STRT. */
12395 case 1:
12396 case 5:
12397 /* STR. */
12398 case 0:
12399 case 4:
12400 record_buf_mem[0] = 4;
12401 break;
12402
12403 /* STRB. */
12404 case 10:
12405 case 14:
12406 /* STRB. */
12407 case 11:
12408 case 15:
12409 /* STRBT. */
12410 case 3:
12411 case 7:
12412 /* STRB. */
12413 case 2:
12414 case 6:
12415 record_buf_mem[0] = 1;
12416 break;
12417
12418 default:
12419 gdb_assert_not_reached ("no decoding pattern found");
12420 break;
12421 }
12422 record_buf_mem[1] = tgt_mem_addr;
12423 arm_insn_r->mem_rec_count = 1;
12424
12425 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12426 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12427 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12428 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12429 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12430 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12431 )
12432 {
12433 /* Rn is going to be changed in register scaled pre-indexed
12434 mode,and scaled post indexed mode. */
12435 record_buf[0] = reg_src2;
12436 arm_insn_r->reg_rec_count = 1;
12437 }
12438 }
12439 }
12440
12441 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12442 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12443 return 0;
12444 }
12445
12446 /* Handle ARM mode instructions with opcode 100. */
12447
12448 static int
12449 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12450 {
12451 struct regcache *reg_cache = arm_insn_r->regcache;
12452 uint32_t register_count = 0, register_bits;
12453 uint32_t reg_base, addr_mode;
12454 uint32_t record_buf[24], record_buf_mem[48];
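/* Each memory record occupies two slots of RECORD_BUF_MEM: the length in
   bytes followed by the target address (see MEM_ALLOC below). */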
12455 uint32_t wback;
12456 ULONGEST u_regval;
12457
12458 /* Fetch the list of registers. */
12459 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12460 arm_insn_r->reg_rec_count = 0;
12461
12462 /* Fetch the base register that holds the address we load data from
12463 or store data to. */
12464 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12465
12466 /* Calculate wback. */
12467 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
12468
12469 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12470 {
12471 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12472
12473 /* Find out which registers are going to be loaded from memory. */
12474 while (register_bits)
12475 {
12476 if (register_bits & 0x00000001)
12477 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12478 register_bits = register_bits >> 1;
12479 register_count++;
12480 }
12481
12482
12483 /* If wback is true, also save the base register, which is going to be
12484 written to. */
12485 if (wback)
12486 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12487
12488 /* Save the CPSR register. */
12489 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12490 }
12491 else
12492 {
12493 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12494
12495 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12496
12497 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12498
12499 /* Find out how many registers are going to be stored to memory. */
12500 while (register_bits)
12501 {
12502 if (register_bits & 0x00000001)
12503 register_count++;
12504 register_bits = register_bits >> 1;
12505 }
12506
12507 switch (addr_mode)
12508 {
12509 /* STMDA (STMED): Decrement after. */
12510 case 0:
12511 record_buf_mem[1] = (uint32_t) u_regval
12512 - register_count * ARM_INT_REGISTER_SIZE + 4;
12513 break;
12514 /* STM (STMIA, STMEA): Increment after. */
12515 case 1:
12516 record_buf_mem[1] = (uint32_t) u_regval;
12517 break;
12518 /* STMDB (STMFD): Decrement before. */
12519 case 2:
12520 record_buf_mem[1] = (uint32_t) u_regval
12521 - register_count * ARM_INT_REGISTER_SIZE;
12522 break;
12523 /* STMIB (STMFA): Increment before. */
12524 case 3:
12525 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12526 break;
12527 default:
12528 gdb_assert_not_reached ("no decoding pattern found");
12529 break;
12530 }
12531
12532 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12533 arm_insn_r->mem_rec_count = 1;
12534
12535 /* If wback is true, also save the base register, which is going to be
12536 written to. */
12537 if (wback)
12538 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12539 }
12540
12541 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12542 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12543 return 0;
12544 }
12545
12546 /* Handling opcode 101 insns. */
12547
12548 static int
12549 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12550 {
12551 uint32_t record_buf[8];
12552
12553 /* Handle B, BL, BLX(1) insns. */
12554 /* B simply branches so we do nothing here. */
12555 /* Note: BLX(1) doesn't fall here but instead it falls into
12556 extension space. */
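/* Bit 24 is the link bit: BL overwrites LR with the return address, so LR
   must be recorded. */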
12557 if (bit (arm_insn_r->arm_insn, 24))
12558 {
12559 record_buf[0] = ARM_LR_REGNUM;
12560 arm_insn_r->reg_rec_count = 1;
12561 }
12562
12563 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12564
12565 return 0;
12566 }
12567
12568 static int
12569 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12570 {
12571 gdb_printf (gdb_stderr,
12572 _("Process record does not support instruction "
12573 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12574 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12575
12576 return -1;
12577 }
12578
12579 /* Record handler for vector data transfer instructions. */
12580
12581 static int
12582 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12583 {
12584 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12585 uint32_t record_buf[4];
12586
12587 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12588 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12589 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12590 bit_l = bit (arm_insn_r->arm_insn, 20);
12591 bit_c = bit (arm_insn_r->arm_insn, 8);
12592
12593 /* Handle VMOV instruction. */
12594 if (bit_l && bit_c)
12595 {
12596 record_buf[0] = reg_t;
12597 arm_insn_r->reg_rec_count = 1;
12598 }
12599 else if (bit_l && !bit_c)
12600 {
12601 /* Handle VMOV instruction. */
12602 if (bits_a == 0x00)
12603 {
12604 record_buf[0] = reg_t;
12605 arm_insn_r->reg_rec_count = 1;
12606 }
12607 /* Handle VMRS instruction. */
12608 else if (bits_a == 0x07)
12609 {
12610 if (reg_t == 15)
12611 reg_t = ARM_PS_REGNUM;
12612
12613 record_buf[0] = reg_t;
12614 arm_insn_r->reg_rec_count = 1;
12615 }
12616 }
12617 else if (!bit_l && !bit_c)
12618 {
12619 /* Handle VMOV instruction. */
12620 if (bits_a == 0x00)
12621 {
12622 record_buf[0] = ARM_D0_REGNUM + reg_v;
12623
12624 arm_insn_r->reg_rec_count = 1;
12625 }
12626 /* Handle VMSR instruction. */
12627 else if (bits_a == 0x07)
12628 {
12629 record_buf[0] = ARM_FPSCR_REGNUM;
12630 arm_insn_r->reg_rec_count = 1;
12631 }
12632 }
12633 else if (!bit_l && bit_c)
12634 {
12635 /* Handle VMOV instruction. */
12636 if (!(bits_a & 0x04))
12637 {
12638 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12639 + ARM_D0_REGNUM;
12640 arm_insn_r->reg_rec_count = 1;
12641 }
12642 /* Handle VDUP instruction. */
12643 else
12644 {
12645 if (bit (arm_insn_r->arm_insn, 21))
12646 {
12647 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12648 record_buf[0] = reg_v + ARM_D0_REGNUM;
12649 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12650 arm_insn_r->reg_rec_count = 2;
12651 }
12652 else
12653 {
12654 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12655 record_buf[0] = reg_v + ARM_D0_REGNUM;
12656 arm_insn_r->reg_rec_count = 1;
12657 }
12658 }
12659 }
12660
12661 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12662 return 0;
12663 }
12664
12665 /* Record handler for extension register load/store instructions. */
12666
12667 static int
12668 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12669 {
12670 uint32_t opcode, single_reg;
12671 uint8_t op_vldm_vstm;
12672 uint32_t record_buf[8], record_buf_mem[128];
12673 ULONGEST u_regval = 0;
12674
12675 struct regcache *reg_cache = arm_insn_r->regcache;
12676
12677 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12678 single_reg = !bit (arm_insn_r->arm_insn, 8);
12679 op_vldm_vstm = opcode & 0x1b;
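/* Masking out bit 2 of OPCODE (insn bit 22, the D bit) lets the VLDM/VSTM
   checks below ignore which half of the register bank is addressed. */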
12680
12681 /* Handle VMOV instructions. */
12682 if ((opcode & 0x1e) == 0x04)
12683 {
12684 if (bit (arm_insn_r->arm_insn, 20)) /* Bit 20 set: transfer to ARM core registers. */
12685 {
12686 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12687 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12688 arm_insn_r->reg_rec_count = 2;
12689 }
12690 else
12691 {
12692 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12693 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12694
12695 if (single_reg)
12696 {
12697 /* The first S register number m is REG_M:M (M is bit 5),
12698 the corresponding D register number is REG_M:M / 2, which
12699 is REG_M. */
12700 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12701 /* The second S register number is REG_M:M + 1, the
12702 corresponding D register number is (REG_M:M + 1) / 2.
12703 IOW, if bit M is 1, the first and second S registers
12704 are mapped to different D registers, otherwise, they are
12705 in the same D register. */
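/* For example, with REG_M = 2 and M = 1 the transferred S registers are
   S5 and S6, which live in D2 and D3 respectively, so both D registers
   are recorded. */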
12706 if (bit_m)
12707 {
12708 record_buf[arm_insn_r->reg_rec_count++]
12709 = ARM_D0_REGNUM + reg_m + 1;
12710 }
12711 }
12712 else
12713 {
12714 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12715 arm_insn_r->reg_rec_count = 1;
12716 }
12717 }
12718 }
12719 /* Handle VSTM and VPUSH instructions. */
12720 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12721 || op_vldm_vstm == 0x12)
12722 {
12723 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12724 uint32_t memory_index = 0;
12725
12726 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12727 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12728 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12729 imm_off32 = imm_off8 << 2;
12730 memory_count = imm_off8;
12731
12732 if (bit (arm_insn_r->arm_insn, 23))
12733 start_address = u_regval;
12734 else
12735 start_address = u_regval - imm_off32;
12736
12737 if (bit (arm_insn_r->arm_insn, 21))
12738 {
12739 record_buf[0] = reg_rn;
12740 arm_insn_r->reg_rec_count = 1;
12741 }
12742
12743 while (memory_count > 0)
12744 {
12745 if (single_reg)
12746 {
12747 record_buf_mem[memory_index] = 4;
12748 record_buf_mem[memory_index + 1] = start_address;
12749 start_address = start_address + 4;
12750 memory_index = memory_index + 2;
12751 }
12752 else
12753 {
12754 record_buf_mem[memory_index] = 4;
12755 record_buf_mem[memory_index + 1] = start_address;
12756 record_buf_mem[memory_index + 2] = 4;
12757 record_buf_mem[memory_index + 3] = start_address + 4;
12758 start_address = start_address + 8;
12759 memory_index = memory_index + 4;
12760 }
12761 memory_count--;
12762 }
12763 arm_insn_r->mem_rec_count = (memory_index >> 1);
12764 }
12765 /* Handle VLDM instructions. */
12766 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12767 || op_vldm_vstm == 0x13)
12768 {
12769 uint32_t reg_count, reg_vd;
12770 uint32_t reg_index = 0;
12771 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12772
12773 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12774 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12775
12776 /* REG_VD is the first D register number. If the instruction
12777 loads memory to S registers (SINGLE_REG is TRUE), the register
12778 number is (REG_VD << 1 | bit D), so the corresponding D
12779 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
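/* For example, with REG_VD = 3 and bit D = 1 the first S register is S7,
   which lives in D3, i.e. in REG_VD itself. */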
12780 if (!single_reg)
12781 reg_vd = reg_vd | (bit_d << 4);
12782
12783 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12784 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12785
12786 /* If the instruction loads memory to D register, REG_COUNT should
12787 be divided by 2, according to the ARM Architecture Reference
12788 Manual. If the instruction loads memory to S register, divide by
12789 2 as well because two S registers are mapped to D register. */
12790 reg_count = reg_count / 2;
12791 if (single_reg && bit_d)
12792 {
12793 /* Increase the register count if S register list starts from
12794 an odd number (bit d is one). */
12795 reg_count++;
12796 }
12797
12798 while (reg_count > 0)
12799 {
12800 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12801 reg_count--;
12802 }
12803 arm_insn_r->reg_rec_count = reg_index;
12804 }
12805 /* VSTR Vector store register. */
12806 else if ((opcode & 0x13) == 0x10)
12807 {
12808 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12809 uint32_t memory_index = 0;
12810
12811 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12812 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12813 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12814 imm_off32 = imm_off8 << 2;
12815
12816 if (bit (arm_insn_r->arm_insn, 23))
12817 start_address = u_regval + imm_off32;
12818 else
12819 start_address = u_regval - imm_off32;
12820
12821 if (single_reg)
12822 {
12823 record_buf_mem[memory_index] = 4;
12824 record_buf_mem[memory_index + 1] = start_address;
12825 arm_insn_r->mem_rec_count = 1;
12826 }
12827 else
12828 {
12829 record_buf_mem[memory_index] = 4;
12830 record_buf_mem[memory_index + 1] = start_address;
12831 record_buf_mem[memory_index + 2] = 4;
12832 record_buf_mem[memory_index + 3] = start_address + 4;
12833 arm_insn_r->mem_rec_count = 2;
12834 }
12835 }
12836 /* VLDR Vector load register. */
12837 else if ((opcode & 0x13) == 0x11)
12838 {
12839 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12840
12841 if (!single_reg)
12842 {
12843 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12844 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12845 }
12846 else
12847 {
12848 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12849 /* Record register D rather than pseudo register S. */
12850 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12851 }
12852 arm_insn_r->reg_rec_count = 1;
12853 }
12854
12855 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12856 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12857 return 0;
12858 }
12859
12860 /* Record handler for arm/thumb mode VFP data processing instructions. */
12861
12862 static int
12863 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12864 {
12865 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12866 uint32_t record_buf[4];
12867 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12868 enum insn_types curr_insn_type = INSN_INV;
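/* INSN_T0 records a pair of D registers, INSN_T1 a double-precision
   destination, INSN_T2 a single-precision destination and INSN_T3 only the
   FPSCR; see the switch at the end of this function. */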
12869
12870 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12871 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12872 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12873 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12874 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12875 bit_d = bit (arm_insn_r->arm_insn, 22);
12876 /* Mask off the "D" bit. */
12877 opc1 = opc1 & ~0x04;
12878
12879 /* Handle VMLA, VMLS. */
12880 if (opc1 == 0x00)
12881 {
12882 if (bit (arm_insn_r->arm_insn, 10))
12883 {
12884 if (bit (arm_insn_r->arm_insn, 6))
12885 curr_insn_type = INSN_T0;
12886 else
12887 curr_insn_type = INSN_T1;
12888 }
12889 else
12890 {
12891 if (dp_op_sz)
12892 curr_insn_type = INSN_T1;
12893 else
12894 curr_insn_type = INSN_T2;
12895 }
12896 }
12897 /* Handle VNMLA, VNMLS, VNMUL. */
12898 else if (opc1 == 0x01)
12899 {
12900 if (dp_op_sz)
12901 curr_insn_type = INSN_T1;
12902 else
12903 curr_insn_type = INSN_T2;
12904 }
12905 /* Handle VMUL. */
12906 else if (opc1 == 0x02 && !(opc3 & 0x01))
12907 {
12908 if (bit (arm_insn_r->arm_insn, 10))
12909 {
12910 if (bit (arm_insn_r->arm_insn, 6))
12911 curr_insn_type = INSN_T0;
12912 else
12913 curr_insn_type = INSN_T1;
12914 }
12915 else
12916 {
12917 if (dp_op_sz)
12918 curr_insn_type = INSN_T1;
12919 else
12920 curr_insn_type = INSN_T2;
12921 }
12922 }
12923 /* Handle VADD, VSUB. */
12924 else if (opc1 == 0x03)
12925 {
12926 if (!bit (arm_insn_r->arm_insn, 9))
12927 {
12928 if (bit (arm_insn_r->arm_insn, 6))
12929 curr_insn_type = INSN_T0;
12930 else
12931 curr_insn_type = INSN_T1;
12932 }
12933 else
12934 {
12935 if (dp_op_sz)
12936 curr_insn_type = INSN_T1;
12937 else
12938 curr_insn_type = INSN_T2;
12939 }
12940 }
12941 /* Handle VDIV. */
12942 else if (opc1 == 0x08)
12943 {
12944 if (dp_op_sz)
12945 curr_insn_type = INSN_T1;
12946 else
12947 curr_insn_type = INSN_T2;
12948 }
12949 /* Handle all other vfp data processing instructions. */
12950 else if (opc1 == 0x0b)
12951 {
12952 /* Handle VMOV. */
12953 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12954 {
12955 if (bit (arm_insn_r->arm_insn, 4))
12956 {
12957 if (bit (arm_insn_r->arm_insn, 6))
12958 curr_insn_type = INSN_T0;
12959 else
12960 curr_insn_type = INSN_T1;
12961 }
12962 else
12963 {
12964 if (dp_op_sz)
12965 curr_insn_type = INSN_T1;
12966 else
12967 curr_insn_type = INSN_T2;
12968 }
12969 }
12970 /* Handle VNEG and VABS. */
12971 else if ((opc2 == 0x01 && opc3 == 0x01)
12972 || (opc2 == 0x00 && opc3 == 0x03))
12973 {
12974 if (!bit (arm_insn_r->arm_insn, 11))
12975 {
12976 if (bit (arm_insn_r->arm_insn, 6))
12977 curr_insn_type = INSN_T0;
12978 else
12979 curr_insn_type = INSN_T1;
12980 }
12981 else
12982 {
12983 if (dp_op_sz)
12984 curr_insn_type = INSN_T1;
12985 else
12986 curr_insn_type = INSN_T2;
12987 }
12988 }
12989 /* Handle VSQRT. */
12990 else if (opc2 == 0x01 && opc3 == 0x03)
12991 {
12992 if (dp_op_sz)
12993 curr_insn_type = INSN_T1;
12994 else
12995 curr_insn_type = INSN_T2;
12996 }
12997 /* Handle VCVT. */
12998 else if (opc2 == 0x07 && opc3 == 0x03)
12999 {
13000 if (!dp_op_sz)
13001 curr_insn_type = INSN_T1;
13002 else
13003 curr_insn_type = INSN_T2;
13004 }
13005 else if (opc3 & 0x01)
13006 {
13007 /* Handle VCVT. */
13008 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
13009 {
13010 if (!bit (arm_insn_r->arm_insn, 18))
13011 curr_insn_type = INSN_T2;
13012 else
13013 {
13014 if (dp_op_sz)
13015 curr_insn_type = INSN_T1;
13016 else
13017 curr_insn_type = INSN_T2;
13018 }
13019 }
13020 /* Handle VCVT. */
13021 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
13022 {
13023 if (dp_op_sz)
13024 curr_insn_type = INSN_T1;
13025 else
13026 curr_insn_type = INSN_T2;
13027 }
13028 /* Handle VCVTB, VCVTT. */
13029 else if ((opc2 & 0x0e) == 0x02)
13030 curr_insn_type = INSN_T2;
13031 /* Handle VCMP, VCMPE. */
13032 else if ((opc2 & 0x0e) == 0x04)
13033 curr_insn_type = INSN_T3;
13034 }
13035 }
13036
13037 switch (curr_insn_type)
13038 {
13039 case INSN_T0:
13040 reg_vd = reg_vd | (bit_d << 4);
13041 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13042 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
13043 arm_insn_r->reg_rec_count = 2;
13044 break;
13045
13046 case INSN_T1:
13047 reg_vd = reg_vd | (bit_d << 4);
13048 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13049 arm_insn_r->reg_rec_count = 1;
13050 break;
13051
13052 case INSN_T2:
13053 reg_vd = (reg_vd << 1) | bit_d;
13054 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13055 arm_insn_r->reg_rec_count = 1;
13056 break;
13057
13058 case INSN_T3:
13059 record_buf[0] = ARM_FPSCR_REGNUM;
13060 arm_insn_r->reg_rec_count = 1;
13061 break;
13062
13063 default:
13064 gdb_assert_not_reached ("no decoding pattern found");
13065 break;
13066 }
13067
13068 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
13069 return 0;
13070 }
13071
13072 /* Handling opcode 110 insns. */
13073
13074 static int
13075 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
13076 {
13077 uint32_t op1, op1_ebit, coproc;
13078
13079 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13080 op1 = bits (arm_insn_r->arm_insn, 20, 25);
13081 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13082
13083 if ((coproc & 0x0e) == 0x0a)
13084 {
13085 /* Handle extension register ld/st instructions. */
13086 if (!(op1 & 0x20))
13087 return arm_record_exreg_ld_st_insn (arm_insn_r);
13088
13089 /* 64-bit transfers between arm core and extension registers. */
13090 if ((op1 & 0x3e) == 0x04)
13091 return arm_record_exreg_ld_st_insn (arm_insn_r);
13092 }
13093 else
13094 {
13095 /* Handle coprocessor ld/st instructions. */
13096 if (!(op1 & 0x3a))
13097 {
13098 /* Store. */
13099 if (!op1_ebit)
13100 return arm_record_unsupported_insn (arm_insn_r);
13101 else
13102 /* Load. */
13103 return arm_record_unsupported_insn (arm_insn_r);
13104 }
13105
13106 /* Move to coprocessor from two arm core registers. */
13107 if (op1 == 0x4)
13108 return arm_record_unsupported_insn (arm_insn_r);
13109
13110 /* Move to two arm core registers from coprocessor. */
13111 if (op1 == 0x5)
13112 {
13113 uint32_t reg_t[2];
13114
13115 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
13116 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
13117 arm_insn_r->reg_rec_count = 2;
13118
13119 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
13120 return 0;
13121 }
13122 }
13123 return arm_record_unsupported_insn (arm_insn_r);
13124 }
13125
13126 /* Handling opcode 111 insns. */
13127
13128 static int
13129 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
13130 {
13131 uint32_t op, op1_ebit, coproc, bits_24_25;
13132 arm_gdbarch_tdep *tdep
13133 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch);
13134 struct regcache *reg_cache = arm_insn_r->regcache;
13135
13136 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
13137 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13138 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13139 op = bit (arm_insn_r->arm_insn, 4);
13140 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
13141
13142 /* Handle arm SWI/SVC system call instructions. */
13143 if (bits_24_25 == 0x3)
13144 {
13145 if (tdep->arm_syscall_record != NULL)
13146 {
13147 ULONGEST svc_operand, svc_number;
13148
13149 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
13150
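/* With the legacy OABI the syscall number is encoded in the SVC immediate
   (biased by 0x900000); under EABI the immediate is zero and the number is
   passed in r7. */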
13151 if (svc_operand) /* OABI. */
13152 svc_number = svc_operand - 0x900000;
13153 else /* EABI. */
13154 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
13155
13156 return tdep->arm_syscall_record (reg_cache, svc_number);
13157 }
13158 else
13159 {
13160 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13161 return -1;
13162 }
13163 }
13164 else if (bits_24_25 == 0x02)
13165 {
13166 if (op)
13167 {
13168 if ((coproc & 0x0e) == 0x0a)
13169 {
13170 /* 8, 16, and 32-bit transfer */
13171 return arm_record_vdata_transfer_insn (arm_insn_r);
13172 }
13173 else
13174 {
13175 if (op1_ebit)
13176 {
13177 /* MRC, MRC2 */
13178 uint32_t record_buf[1];
13179
13180 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
13181 if (record_buf[0] == 15)
13182 record_buf[0] = ARM_PS_REGNUM;
13183
13184 arm_insn_r->reg_rec_count = 1;
13185 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
13186 record_buf);
13187 return 0;
13188 }
13189 else
13190 {
13191 /* MCR, MCR2 */
13192 return -1;
13193 }
13194 }
13195 }
13196 else
13197 {
13198 if ((coproc & 0x0e) == 0x0a)
13199 {
13200 /* VFP data-processing instructions. */
13201 return arm_record_vfp_data_proc_insn (arm_insn_r);
13202 }
13203 else
13204 {
13205 /* CDP, CDP2 */
13206 return -1;
13207 }
13208 }
13209 }
13210 else
13211 {
13212 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
13213
13214 if (op1 == 5)
13215 {
13216 if ((coproc & 0x0e) != 0x0a)
13217 {
13218 /* MRRC, MRRC2 */
13219 return -1;
13220 }
13221 }
13222 else if (op1 == 4 || op1 == 5)
13223 {
13224 if ((coproc & 0x0e) == 0x0a)
13225 {
13226 /* 64-bit transfers between ARM core and extension registers. */
13227 return -1;
13228 }
13229 else if (op1 == 4)
13230 {
13231 /* MCRR, MCRR2 */
13232 return -1;
13233 }
13234 }
13235 else if (op1 == 0 || op1 == 1)
13236 {
13237 /* UNDEFINED */
13238 return -1;
13239 }
13240 else
13241 {
13242 if ((coproc & 0x0e) == 0x0a)
13243 {
13244 /* Extension register load/store */
13245 }
13246 else
13247 {
13248 /* STC, STC2, LDC, LDC2 */
13249 }
13250 return -1;
13251 }
13252 }
13253
13254 return -1;
13255 }
13256
13257 /* Handling opcode 000 insns. */
13258
13259 static int
13260 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
13261 {
13262 uint32_t record_buf[8];
13263 uint32_t reg_src1 = 0;
13264
13265 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13266
13267 record_buf[0] = ARM_PS_REGNUM;
13268 record_buf[1] = reg_src1;
13269 thumb_insn_r->reg_rec_count = 2;
13270
13271 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13272
13273 return 0;
13274 }
13275
13276
13277 /* Handling opcode 001 insns. */
13278
13279 static int
13280 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
13281 {
13282 uint32_t record_buf[8];
13283 uint32_t reg_src1 = 0;
13284
13285 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13286
13287 record_buf[0] = ARM_PS_REGNUM;
13288 record_buf[1] = reg_src1;
13289 thumb_insn_r->reg_rec_count = 2;
13290
13291 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13292
13293 return 0;
13294 }
13295
13296 /* Handling opcode 010 insns. */
13297
13298 static int
13299 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
13300 {
13301 struct regcache *reg_cache = thumb_insn_r->regcache;
13302 uint32_t record_buf[8], record_buf_mem[8];
13303
13304 uint32_t reg_src1 = 0, reg_src2 = 0;
13305 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
13306
13307 ULONGEST u_regval[2] = {0};
13308
13309 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
13310
13311 if (bit (thumb_insn_r->arm_insn, 12))
13312 {
13313 /* Handle load/store register offset. */
13314 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
13315
13316 if (in_inclusive_range (opB, 4U, 7U))
13317 {
13318 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
13319 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13320 record_buf[0] = reg_src1;
13321 thumb_insn_r->reg_rec_count = 1;
13322 }
13323 else if (in_inclusive_range (opB, 0U, 2U))
13324 {
13325 /* STR(2), STRB(2), STRH(2). */
13326 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13327 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
13328 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
13329 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
13330 if (0 == opB)
13331 record_buf_mem[0] = 4; /* STR (2). */
13332 else if (2 == opB)
13333 record_buf_mem[0] = 1; /* STRB (2). */
13334 else if (1 == opB)
13335 record_buf_mem[0] = 2; /* STRH (2). */
13336 record_buf_mem[1] = u_regval[0] + u_regval[1];
13337 thumb_insn_r->mem_rec_count = 1;
13338 }
13339 }
13340 else if (bit (thumb_insn_r->arm_insn, 11))
13341 {
13342 /* Handle load from literal pool. */
13343 /* LDR(3). */
13344 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13345 record_buf[0] = reg_src1;
13346 thumb_insn_r->reg_rec_count = 1;
13347 }
13348 else if (opcode1)
13349 {
13350 /* Special data instructions and branch and exchange */
13351 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
13352 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
13353 if ((3 == opcode2) && (!opcode3))
13354 {
13355 /* Branch with exchange. */
13356 record_buf[0] = ARM_PS_REGNUM;
13357 thumb_insn_r->reg_rec_count = 1;
13358 }
13359 else
13360 {
13361 /* Format 8; special data processing insns. */
13362 record_buf[0] = ARM_PS_REGNUM;
13363 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13364 | bits (thumb_insn_r->arm_insn, 0, 2));
13365 thumb_insn_r->reg_rec_count = 2;
13366 }
13367 }
13368 else
13369 {
13370 /* Format 5; data processing insns. */
13371 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13372 if (bit (thumb_insn_r->arm_insn, 7))
13373 {
13374 reg_src1 = reg_src1 + 8;
13375 }
13376 record_buf[0] = ARM_PS_REGNUM;
13377 record_buf[1] = reg_src1;
13378 thumb_insn_r->reg_rec_count = 2;
13379 }
13380
13381 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13382 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13383 record_buf_mem);
13384
13385 return 0;
13386 }
13387
13388 /* Handling opcode 011 insns. */
13389
13390 static int
13391 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13392 {
13393 struct regcache *reg_cache = thumb_insn_r->regcache;
13394 uint32_t record_buf[8], record_buf_mem[8];
13395
13396 uint32_t reg_src1 = 0;
13397 uint32_t opcode = 0, immed_5 = 0;
13398
13399 ULONGEST u_regval = 0;
13400
13401 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13402
13403 if (opcode)
13404 {
13405 /* LDR(1). */
13406 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13407 record_buf[0] = reg_src1;
13408 thumb_insn_r->reg_rec_count = 1;
13409 }
13410 else
13411 {
13412 /* STR(1). */
13413 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13414 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13415 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13416 record_buf_mem[0] = 4;
13417 record_buf_mem[1] = u_regval + (immed_5 * 4);
13418 thumb_insn_r->mem_rec_count = 1;
13419 }
13420
13421 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13422 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13423 record_buf_mem);
13424
13425 return 0;
13426 }
13427
13428 /* Handling opcode 100 insns. */
13429
13430 static int
13431 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13432 {
13433 struct regcache *reg_cache = thumb_insn_r->regcache;
13434 uint32_t record_buf[8], record_buf_mem[8];
13435
13436 uint32_t reg_src1 = 0;
13437 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13438
13439 ULONGEST u_regval = 0;
13440
13441 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13442
13443 if (3 == opcode)
13444 {
13445 /* LDR(4). */
13446 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13447 record_buf[0] = reg_src1;
13448 thumb_insn_r->reg_rec_count = 1;
13449 }
13450 else if (1 == opcode)
13451 {
13452 /* LDRH(1). */
13453 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13454 record_buf[0] = reg_src1;
13455 thumb_insn_r->reg_rec_count = 1;
13456 }
13457 else if (2 == opcode)
13458 {
13459 /* STR(3). */
13460 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13461 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13462 record_buf_mem[0] = 4;
13463 record_buf_mem[1] = u_regval + (immed_8 * 4);
13464 thumb_insn_r->mem_rec_count = 1;
13465 }
13466 else if (0 == opcode)
13467 {
13468 /* STRH(1). */
13469 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13470 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13471 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13472 record_buf_mem[0] = 2;
13473 record_buf_mem[1] = u_regval + (immed_5 * 2);
13474 thumb_insn_r->mem_rec_count = 1;
13475 }
13476
13477 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13478 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13479 record_buf_mem);
13480
13481 return 0;
13482 }
13483
13484 /* Handling opcode 101 insns. */
13485
13486 static int
13487 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13488 {
13489 struct regcache *reg_cache = thumb_insn_r->regcache;
13490
13491 uint32_t opcode = 0;
13492 uint32_t register_bits = 0, register_count = 0;
13493 uint32_t index = 0, start_address = 0;
13494 uint32_t record_buf[24], record_buf_mem[48];
13495 uint32_t reg_src1;
13496
13497 ULONGEST u_regval = 0;
13498
13499 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13500
13501 if (opcode == 0 || opcode == 1)
13502 {
13503 /* ADR and ADD (SP plus immediate) */
13504
13505 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13506 record_buf[0] = reg_src1;
13507 thumb_insn_r->reg_rec_count = 1;
13508 }
13509 else
13510 {
13511 /* Miscellaneous 16-bit instructions */
13512 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13513
13514 switch (opcode2)
13515 {
13516 case 6:
13517 /* SETEND and CPS */
13518 break;
13519 case 0:
13520 /* ADD/SUB (SP plus immediate) */
13521 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13522 record_buf[0] = ARM_SP_REGNUM;
13523 thumb_insn_r->reg_rec_count = 1;
13524 break;
13525 case 1: /* fall through */
13526 case 3: /* fall through */
13527 case 9: /* fall through */
13528 case 11:
13529 /* CBNZ, CBZ */
13530 break;
13531 case 2:
13532 /* SXTH, SXTB, UXTH, UXTB */
13533 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13534 thumb_insn_r->reg_rec_count = 1;
13535 break;
13536 case 4: /* fall through */
13537 case 5:
13538 /* PUSH. */
13539 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13540 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13541 while (register_bits)
13542 {
13543 if (register_bits & 0x00000001)
13544 register_count++;
13545 register_bits = register_bits >> 1;
13546 }
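/* Bit 8 is the M/R bit of PUSH: when set, LR is pushed in addition to the
   registers in the list, adding one more stored word. */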
13547 start_address = u_regval - \
13548 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
13549 thumb_insn_r->mem_rec_count = register_count;
13550 while (register_count)
13551 {
13552 record_buf_mem[(register_count * 2) - 1] = start_address;
13553 record_buf_mem[(register_count * 2) - 2] = 4;
13554 start_address = start_address + 4;
13555 register_count--;
13556 }
13557 record_buf[0] = ARM_SP_REGNUM;
13558 thumb_insn_r->reg_rec_count = 1;
13559 break;
13560 case 10:
13561 /* REV, REV16, REVSH */
13562 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13563 thumb_insn_r->reg_rec_count = 1;
13564 break;
13565 case 12: /* fall through */
13566 case 13:
13567 /* POP. */
13568 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13569 while (register_bits)
13570 {
13571 if (register_bits & 0x00000001)
13572 record_buf[index++] = register_count;
13573 register_bits = register_bits >> 1;
13574 register_count++;
13575 }
13576 record_buf[index++] = ARM_PS_REGNUM;
13577 record_buf[index++] = ARM_SP_REGNUM;
13578 thumb_insn_r->reg_rec_count = index;
13579 break;
13580 case 0xe:
13581 /* BKPT insn. */
13582 /* Handle the enhanced software breakpoint insn, BKPT. */
13583 /* The CPSR is changed so execution resumes in ARM state with normal
13584 interrupts disabled, entering abort mode. */
13585 /* The PC is set according to the high vector configuration. */
13586 /* If the user hits the breakpoint and then executes in reverse, we need
13587 to go back to the previous CPSR and program counter. */
13588 record_buf[0] = ARM_PS_REGNUM;
13589 record_buf[1] = ARM_LR_REGNUM;
13590 thumb_insn_r->reg_rec_count = 2;
13591 /* We need to save SPSR value, which is not yet done. */
13592 gdb_printf (gdb_stderr,
13593 _("Process record does not support instruction "
13594 "0x%0x at address %s.\n"),
13595 thumb_insn_r->arm_insn,
13596 paddress (thumb_insn_r->gdbarch,
13597 thumb_insn_r->this_addr));
13598 return -1;
13599
13600 case 0xf:
13601 /* If-Then, and hints */
13602 break;
13603 default:
13604 return -1;
13605 };
13606 }
13607
13608 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13609 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13610 record_buf_mem);
13611
13612 return 0;
13613 }
13614
13615 /* Handling opcode 110 insns. */
13616
13617 static int
13618 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13619 {
13620 arm_gdbarch_tdep *tdep
13621 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch);
13622 struct regcache *reg_cache = thumb_insn_r->regcache;
13623
13624 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
13625 uint32_t reg_src1 = 0;
13626 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13627 uint32_t index = 0, start_address = 0;
13628 uint32_t record_buf[24], record_buf_mem[48];
13629
13630 ULONGEST u_regval = 0;
13631
13632 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13633 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13634
13635 if (1 == opcode2)
13636 {
13637
13638 /* LDMIA. */
13639 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13640 /* Get Rn. */
13641 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13642 while (register_bits)
13643 {
13644 if (register_bits & 0x00000001)
13645 record_buf[index++] = register_count;
13646 register_bits = register_bits >> 1;
13647 register_count++;
13648 }
13649 record_buf[index++] = reg_src1;
13650 thumb_insn_r->reg_rec_count = index;
13651 }
13652 else if (0 == opcode2)
13653 {
13654 /* Handle STMIA. */
13655 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13656 /* Get Rn. */
13657 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13658 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13659 while (register_bits)
13660 {
13661 if (register_bits & 0x00000001)
13662 register_count++;
13663 register_bits = register_bits >> 1;
13664 }
13665 start_address = u_regval;
13666 thumb_insn_r->mem_rec_count = register_count;
13667 while (register_count)
13668 {
13669 record_buf_mem[(register_count * 2) - 1] = start_address;
13670 record_buf_mem[(register_count * 2) - 2] = 4;
13671 start_address = start_address + 4;
13672 register_count--;
13673 }
13674 }
13675 else if (0x1F == opcode1)
13676 {
13677 /* Handle arm syscall insn. */
13678 if (tdep->arm_syscall_record != NULL)
13679 {
13680 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13681 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13682 }
13683 else
13684 {
13685 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13686 return -1;
13687 }
13688 }
13689
13690 /* B(1), the conditional branch, is automatically taken care of in
13691 process_record, as the PC is saved there. */
13692
13693 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13694 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13695 record_buf_mem);
13696
13697 return ret;
13698 }
13699
13700 /* Handling opcode 111 insns. */
13701
13702 static int
13703 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13704 {
13705 uint32_t record_buf[8];
13706 uint32_t bits_h = 0;
13707
13708 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13709
13710 if (2 == bits_h || 3 == bits_h)
13711 {
13712 /* BL */
13713 record_buf[0] = ARM_LR_REGNUM;
13714 thumb_insn_r->reg_rec_count = 1;
13715 }
13716 else if (1 == bits_h)
13717 {
13718 /* BLX(1). */
13719 record_buf[0] = ARM_PS_REGNUM;
13720 record_buf[1] = ARM_LR_REGNUM;
13721 thumb_insn_r->reg_rec_count = 2;
13722 }
13723
13724 /* B(2) is automatically taken care of in process_record, as the PC is
13725 saved there. */
13726
13727 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13728
13729 return 0;
13730 }
13731
13732 /* Handler for thumb2 load/store multiple instructions. */
13733
13734 static int
13735 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13736 {
13737 struct regcache *reg_cache = thumb2_insn_r->regcache;
13738
13739 uint32_t reg_rn, op;
13740 uint32_t register_bits = 0, register_count = 0;
13741 uint32_t index = 0, start_address = 0;
13742 uint32_t record_buf[24], record_buf_mem[48];
13743
13744 ULONGEST u_regval = 0;
13745
13746 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13747 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13748
13749 if (0 == op || 3 == op)
13750 {
13751 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13752 {
13753 /* Handle RFE instruction. */
13754 record_buf[0] = ARM_PS_REGNUM;
13755 thumb2_insn_r->reg_rec_count = 1;
13756 }
13757 else
13758 {
13759 /* Handle SRS instruction after reading banked SP. */
13760 return arm_record_unsupported_insn (thumb2_insn_r);
13761 }
13762 }
13763 else if (1 == op || 2 == op)
13764 {
13765 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13766 {
13767 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13768 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13769 while (register_bits)
13770 {
13771 if (register_bits & 0x00000001)
13772 record_buf[index++] = register_count;
13773
13774 register_count++;
13775 register_bits = register_bits >> 1;
13776 }
13777 record_buf[index++] = reg_rn;
13778 record_buf[index++] = ARM_PS_REGNUM;
13779 thumb2_insn_r->reg_rec_count = index;
13780 }
13781 else
13782 {
13783 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13784 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13785 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13786 while (register_bits)
13787 {
13788 if (register_bits & 0x00000001)
13789 register_count++;
13790
13791 register_bits = register_bits >> 1;
13792 }
13793
13794 if (1 == op)
13795 {
13796 /* Start address calculation for STM/STMIA/STMEA. */
13797 start_address = u_regval;
13798 }
13799 else if (2 == op)
13800 {
13801 /* Start address calculation for STMDB/STMFD. */
13802 start_address = u_regval - register_count * 4;
13803 }
13804
13805 thumb2_insn_r->mem_rec_count = register_count;
13806 while (register_count)
13807 {
13808 record_buf_mem[register_count * 2 - 1] = start_address;
13809 record_buf_mem[register_count * 2 - 2] = 4;
13810 start_address = start_address + 4;
13811 register_count--;
13812 }
13813 record_buf[0] = reg_rn;
13814 record_buf[1] = ARM_PS_REGNUM;
13815 thumb2_insn_r->reg_rec_count = 2;
13816 }
13817 }
13818
13819 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13820 record_buf_mem);
13821 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13822 record_buf);
13823 return ARM_RECORD_SUCCESS;
13824 }
13825
13826 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13827 instructions. */
13828
13829 static int
13830 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13831 {
13832 struct regcache *reg_cache = thumb2_insn_r->regcache;
13833
13834 uint32_t reg_rd, reg_rn, offset_imm;
13835 uint32_t reg_dest1, reg_dest2;
13836 uint32_t address, offset_addr;
13837 uint32_t record_buf[8], record_buf_mem[8];
13838 uint32_t op1, op2, op3;
13839
13840 ULONGEST u_regval[2];
13841
13842 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13843 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13844 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13845
13846 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13847 {
13848 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13849 {
13850 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13851 record_buf[0] = reg_dest1;
13852 record_buf[1] = ARM_PS_REGNUM;
13853 thumb2_insn_r->reg_rec_count = 2;
13854 }
13855
13856 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13857 {
13858 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13859 record_buf[2] = reg_dest2;
13860 thumb2_insn_r->reg_rec_count = 3;
13861 }
13862 }
13863 else
13864 {
13865 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13866 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13867
13868 if (0 == op1 && 0 == op2)
13869 {
13870 /* Handle STREX. */
13871 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13872 address = u_regval[0] + (offset_imm * 4);
13873 record_buf_mem[0] = 4;
13874 record_buf_mem[1] = address;
13875 thumb2_insn_r->mem_rec_count = 1;
13876 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13877 record_buf[0] = reg_rd;
13878 thumb2_insn_r->reg_rec_count = 1;
13879 }
13880 else if (1 == op1 && 0 == op2)
13881 {
13882 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13883 record_buf[0] = reg_rd;
13884 thumb2_insn_r->reg_rec_count = 1;
13885 address = u_regval[0];
13886 record_buf_mem[1] = address;
13887
13888 if (4 == op3)
13889 {
13890 /* Handle STREXB. */
13891 record_buf_mem[0] = 1;
13892 thumb2_insn_r->mem_rec_count = 1;
13893 }
13894 else if (5 == op3)
13895 {
13896 /* Handle STREXH. */
13897 record_buf_mem[0] = 2;
13898 thumb2_insn_r->mem_rec_count = 1;
13899 }
13900 else if (7 == op3)
13901 {
13902 /* Handle STREXD. */
13903 address = u_regval[0];
13904 record_buf_mem[0] = 4;
13905 record_buf_mem[2] = 4;
13906 record_buf_mem[3] = address + 4;
13907 thumb2_insn_r->mem_rec_count = 2;
13908 }
13909 }
13910 else
13911 {
13912 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13913
13914 if (bit (thumb2_insn_r->arm_insn, 24))
13915 {
13916 if (bit (thumb2_insn_r->arm_insn, 23))
13917 offset_addr = u_regval[0] + (offset_imm * 4);
13918 else
13919 offset_addr = u_regval[0] - (offset_imm * 4);
13920
13921 address = offset_addr;
13922 }
13923 else
13924 address = u_regval[0];
13925
13926 record_buf_mem[0] = 4;
13927 record_buf_mem[1] = address;
13928 record_buf_mem[2] = 4;
13929 record_buf_mem[3] = address + 4;
13930 thumb2_insn_r->mem_rec_count = 2;
13931 record_buf[0] = reg_rn;
13932 thumb2_insn_r->reg_rec_count = 1;
13933 }
13934 }
13935
13936 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13937 record_buf);
13938 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13939 record_buf_mem);
13940 return ARM_RECORD_SUCCESS;
13941 }
13942
13943 /* Handler for thumb2 data processing (shift register and modified immediate)
13944 instructions. */
13945
13946 static int
13947 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13948 {
13949 uint32_t reg_rd, op;
13950 uint32_t record_buf[8];
13951
13952 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13953 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13954
13955 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13956 {
13957 record_buf[0] = ARM_PS_REGNUM;
13958 thumb2_insn_r->reg_rec_count = 1;
13959 }
13960 else
13961 {
13962 record_buf[0] = reg_rd;
13963 record_buf[1] = ARM_PS_REGNUM;
13964 thumb2_insn_r->reg_rec_count = 2;
13965 }
13966
13967 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13968 record_buf);
13969 return ARM_RECORD_SUCCESS;
13970 }
13971
13972 /* Generic handler for thumb2 instructions which affect the destination and
13973 PS registers. */
13974
13975 static int
13976 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13977 {
13978 uint32_t reg_rd;
13979 uint32_t record_buf[8];
13980
13981 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13982
13983 record_buf[0] = reg_rd;
13984 record_buf[1] = ARM_PS_REGNUM;
13985 thumb2_insn_r->reg_rec_count = 2;
13986
13987 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13988 record_buf);
13989 return ARM_RECORD_SUCCESS;
13990 }
13991
13992 /* Handler for thumb2 branch and miscellaneous control instructions. */
13993
13994 static int
13995 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
13996 {
13997 uint32_t op, op1, op2;
13998 uint32_t record_buf[8];
13999
14000 op = bits (thumb2_insn_r->arm_insn, 20, 26);
14001 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
14002 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
14003
14004 /* Handle MSR insn. */
14005 if (!(op1 & 0x2) && 0x38 == op)
14006 {
14007 if (!(op2 & 0x3))
14008 {
14009 /* CPSR is going to be changed. */
14010 record_buf[0] = ARM_PS_REGNUM;
14011 thumb2_insn_r->reg_rec_count = 1;
14012 }
14013 else
14014 {
14015 arm_record_unsupported_insn (thumb2_insn_r);
14016 return -1;
14017 }
14018 }
14019 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
14020 {
14021 /* BLX. */
14022 record_buf[0] = ARM_PS_REGNUM;
14023 record_buf[1] = ARM_LR_REGNUM;
14024 thumb2_insn_r->reg_rec_count = 2;
14025 }
14026
14027 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14028 record_buf);
14029 return ARM_RECORD_SUCCESS;
14030 }
14031
14032 /* Handler for thumb2 store single data item instructions. */
14033
14034 static int
14035 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
14036 {
14037 struct regcache *reg_cache = thumb2_insn_r->regcache;
14038
14039 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
14040 uint32_t address, offset_addr;
14041 uint32_t record_buf[8], record_buf_mem[8];
14042 uint32_t op1, op2;
14043
14044 ULONGEST u_regval[2];
14045
14046 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
14047 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
14048 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14049 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
14050
14051 if (bit (thumb2_insn_r->arm_insn, 23))
14052 {
14053 /* T2 encoding. */
14054 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
14055 offset_addr = u_regval[0] + offset_imm;
14056 address = offset_addr;
14057 }
14058 else
14059 {
14060 /* T3 encoding. */
14061 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
14062 {
14063 /* Handle STR, STRB and STRH (register). */
14064 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
14065 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
14066 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
14067 offset_addr = u_regval[1] << shift_imm;
14068 address = u_regval[0] + offset_addr;
14069 }
14070 else
14071 {
14072 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
14073 if (bit (thumb2_insn_r->arm_insn, 10))
14074 {
14075 if (bit (thumb2_insn_r->arm_insn, 9))
14076 offset_addr = u_regval[0] + offset_imm;
14077 else
14078 offset_addr = u_regval[0] - offset_imm;
14079
14080 address = offset_addr;
14081 }
14082 else
14083 address = u_regval[0];
14084 }
14085 }
14086
14087 switch (op1)
14088 {
14089 /* Store byte instructions. */
14090 case 4:
14091 case 0:
14092 record_buf_mem[0] = 1;
14093 break;
14094 /* Store half word instructions. */
14095 case 1:
14096 case 5:
14097 record_buf_mem[0] = 2;
14098 break;
14099 /* Store word instructions. */
14100 case 2:
14101 case 6:
14102 record_buf_mem[0] = 4;
14103 break;
14104
14105 default:
14106 gdb_assert_not_reached ("no decoding pattern found");
14107 break;
14108 }
14109
14110 record_buf_mem[1] = address;
14111 thumb2_insn_r->mem_rec_count = 1;
14112 record_buf[0] = reg_rn;
14113 thumb2_insn_r->reg_rec_count = 1;
14114
14115 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14116 record_buf);
14117 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14118 record_buf_mem);
14119 return ARM_RECORD_SUCCESS;
14120 }
14121
14122 /* Handler for thumb2 load memory hints instructions. */
14123
14124 static int
14125 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
14126 {
14127 uint32_t record_buf[8];
14128 uint32_t reg_rt, reg_rn;
14129
14130 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
14131 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14132
14133 if (ARM_PC_REGNUM != reg_rt)
14134 {
14135 record_buf[0] = reg_rt;
14136 record_buf[1] = reg_rn;
14137 record_buf[2] = ARM_PS_REGNUM;
14138 thumb2_insn_r->reg_rec_count = 3;
14139
14140 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14141 record_buf);
14142 return ARM_RECORD_SUCCESS;
14143 }
14144
14145 return ARM_RECORD_FAILURE;
14146 }
14147
14148 /* Handler for thumb2 load word instructions. */
14149
14150 static int
14151 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
14152 {
14153 uint32_t record_buf[8];
14154
14155 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
14156 record_buf[1] = ARM_PS_REGNUM;
14157 thumb2_insn_r->reg_rec_count = 2;
14158
14159 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14160 record_buf);
14161 return ARM_RECORD_SUCCESS;
14162 }
14163
14164 /* Handler for thumb2 long multiply, long multiply accumulate, and
14165 divide instructions. */
14166
14167 static int
14168 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
14169 {
14170 uint32_t opcode1 = 0, opcode2 = 0;
14171 uint32_t record_buf[8];
14172
14173 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
14174 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
14175
14176 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
14177 {
14178 /* Handle SMULL, UMULL, SMLAL and UMLAL (long multiply and long
14179 multiply accumulate). */
14180 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14181 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14182 record_buf[2] = ARM_PS_REGNUM;
14183 thumb2_insn_r->reg_rec_count = 3;
14184 }
14185 else if (1 == opcode1 || 3 == opcode1)
14186 {
14187 /* Handle SDIV and UDIV. */
14188 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14189 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14190 record_buf[2] = ARM_PS_REGNUM;
14191 thumb2_insn_r->reg_rec_count = 3;
14192 }
14193 else
14194 return ARM_RECORD_FAILURE;
14195
14196 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14197 record_buf);
14198 return ARM_RECORD_SUCCESS;
14199 }
14200
14201 /* Record handler for thumb32 coprocessor instructions. */
14202
14203 static int
14204 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
14205 {
14206 if (bit (thumb2_insn_r->arm_insn, 25))
14207 return arm_record_coproc_data_proc (thumb2_insn_r);
14208 else
14209 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
14210 }
14211
14212 /* Record handler for advanced SIMD structure load/store instructions. */
14213
14214 static int
14215 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
14216 {
14217 struct regcache *reg_cache = thumb2_insn_r->regcache;
14218 uint32_t l_bit, a_bit, b_bits;
14219 uint32_t record_buf[128], record_buf_mem[128];
14220 uint32_t reg_rn, reg_vd, address, f_elem;
14221 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
14222 uint8_t f_ebytes;
14223
14224 l_bit = bit (thumb2_insn_r->arm_insn, 21);
14225 a_bit = bit (thumb2_insn_r->arm_insn, 23);
14226 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
14227 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14228 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
14229 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
14230 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
14231 f_elem = 8 / f_ebytes;
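/* F_EBYTES is the element size in bytes, so a 64-bit D register holds
   F_ELEM = 8 / F_EBYTES elements. */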
14232
14233 if (!l_bit)
14234 {
14235 ULONGEST u_regval = 0;
14236 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
14237 address = u_regval;
14238
14239 if (!a_bit)
14240 {
14241 /* Handle VST1. */
14242 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14243 {
14244 if (b_bits == 0x07)
14245 bf_regs = 1;
14246 else if (b_bits == 0x0a)
14247 bf_regs = 2;
14248 else if (b_bits == 0x06)
14249 bf_regs = 3;
14250 else if (b_bits == 0x02)
14251 bf_regs = 4;
14252 else
14253 bf_regs = 0;
14254
14255 for (index_r = 0; index_r < bf_regs; index_r++)
14256 {
14257 for (index_e = 0; index_e < f_elem; index_e++)
14258 {
14259 record_buf_mem[index_m++] = f_ebytes;
14260 record_buf_mem[index_m++] = address;
14261 address = address + f_ebytes;
14262 thumb2_insn_r->mem_rec_count += 1;
14263 }
14264 }
14265 }
14266 /* Handle VST2. */
14267 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14268 {
14269 if (b_bits == 0x09 || b_bits == 0x08)
14270 bf_regs = 1;
14271 else if (b_bits == 0x03)
14272 bf_regs = 2;
14273 else
14274 bf_regs = 0;
14275
14276 for (index_r = 0; index_r < bf_regs; index_r++)
14277 for (index_e = 0; index_e < f_elem; index_e++)
14278 {
14279 for (loop_t = 0; loop_t < 2; loop_t++)
14280 {
14281 record_buf_mem[index_m++] = f_ebytes;
14282 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14283 thumb2_insn_r->mem_rec_count += 1;
14284 }
14285 address = address + (2 * f_ebytes);
14286 }
14287 }
14288 /* Handle VST3. */
14289 else if ((b_bits & 0x0e) == 0x04)
14290 {
14291 for (index_e = 0; index_e < f_elem; index_e++)
14292 {
14293 for (loop_t = 0; loop_t < 3; loop_t++)
14294 {
14295 record_buf_mem[index_m++] = f_ebytes;
14296 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14297 thumb2_insn_r->mem_rec_count += 1;
14298 }
14299 address = address + (3 * f_ebytes);
14300 }
14301 }
14302 /* Handle VST4. */
14303 else if (!(b_bits & 0x0e))
14304 {
14305 for (index_e = 0; index_e < f_elem; index_e++)
14306 {
14307 for (loop_t = 0; loop_t < 4; loop_t++)
14308 {
14309 record_buf_mem[index_m++] = f_ebytes;
14310 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14311 thumb2_insn_r->mem_rec_count += 1;
14312 }
14313 address = address + (4 * f_ebytes);
14314 }
14315 }
14316 }
14317 else
14318 {
14319 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
14320
14321 if (bft_size == 0x00)
14322 f_ebytes = 1;
14323 else if (bft_size == 0x01)
14324 f_ebytes = 2;
14325 else if (bft_size == 0x02)
14326 f_ebytes = 4;
14327 else
14328 f_ebytes = 0;
14329
14330 /* Handle VST1. */
14331 if (!(b_bits & 0x0b) || b_bits == 0x08)
14332 thumb2_insn_r->mem_rec_count = 1;
14333 /* Handle VST2. */
14334 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
14335 thumb2_insn_r->mem_rec_count = 2;
14336 /* Handle VST3. */
14337 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
14338 thumb2_insn_r->mem_rec_count = 3;
14339 /* Handle VST4. */
14340 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
14341 thumb2_insn_r->mem_rec_count = 4;
14342
14343 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
14344 {
14345 record_buf_mem[index_m * 2] = f_ebytes;
14346 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
14347 }
14348 }
14349 }
14350 else
14351 {
14352 if (!a_bit)
14353 {
14354 /* Handle VLD1. */
14355 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14356 thumb2_insn_r->reg_rec_count = 1;
14357 /* Handle VLD2. */
14358 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14359 thumb2_insn_r->reg_rec_count = 2;
14360 /* Handle VLD3. */
14361 else if ((b_bits & 0x0e) == 0x04)
14362 thumb2_insn_r->reg_rec_count = 3;
14363 /* Handle VLD4. */
14364 else if (!(b_bits & 0x0e))
14365 thumb2_insn_r->reg_rec_count = 4;
14366 }
14367 else
14368 {
14369 /* Handle VLD1. */
14370 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14371 thumb2_insn_r->reg_rec_count = 1;
14372 /* Handle VLD2. */
14373 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14374 thumb2_insn_r->reg_rec_count = 2;
14375 /* Handle VLD3. */
14376 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14377 thumb2_insn_r->reg_rec_count = 3;
14378 /* Handle VLD4. */
14379 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14380 thumb2_insn_r->reg_rec_count = 4;
14381
14382 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14383 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14384 }
14385 }
14386
14387 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14388 {
14389 record_buf[index_r] = reg_rn;
14390 thumb2_insn_r->reg_rec_count += 1;
14391 }
14392
14393 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14394 record_buf);
14395 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14396 record_buf_mem);
14397 return 0;
14398 }
14399
14400 /* Decodes thumb2 instruction type and invokes its record handler. */
14401
14402 static unsigned int
14403 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14404 {
14405 uint32_t op, op1, op2;
14406
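/* decode_insn has already placed the leading halfword of the 32-bit
   encoding in bits 16-31, so OP1 (bits 27-28) and OP2 (bits 20-26)
   correspond to hw1[12:11] and hw1[10:4] of the Thumb-2 encoding
   tables, while OP (bit 15) is bit 15 of the trailing halfword.  */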
14407 op = bit (thumb2_insn_r->arm_insn, 15);
14408 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14409 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
14410
14411 if (op1 == 0x01)
14412 {
14413 if (!(op2 & 0x64))
14414 {
14415 /* Load/store multiple instruction. */
14416 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14417 }
14418 else if ((op2 & 0x64) == 0x4)
14419 {
14420 /* Load/store (dual/exclusive) and table branch instruction. */
14421 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14422 }
14423 else if ((op2 & 0x60) == 0x20)
14424 {
14425 /* Data-processing (shifted register). */
14426 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14427 }
14428 else if (op2 & 0x40)
14429 {
14430 /* Co-processor instructions. */
14431 return thumb2_record_coproc_insn (thumb2_insn_r);
14432 }
14433 }
14434 else if (op1 == 0x02)
14435 {
14436 if (op)
14437 {
14438 /* Branches and miscellaneous control instructions. */
14439 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14440 }
14441 else if (op2 & 0x20)
14442 {
14443 /* Data-processing (plain binary immediate) instruction. */
14444 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14445 }
14446 else
14447 {
14448 /* Data-processing (modified immediate). */
14449 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14450 }
14451 }
14452 else if (op1 == 0x03)
14453 {
14454 if (!(op2 & 0x71))
14455 {
14456 /* Store single data item. */
14457 return thumb2_record_str_single_data (thumb2_insn_r);
14458 }
14459 else if (!((op2 & 0x71) ^ 0x10))
14460 {
14461 /* Advanced SIMD or structure load/store instructions. */
14462 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14463 }
14464 else if (!((op2 & 0x67) ^ 0x01))
14465 {
14466 /* Load byte, memory hints instruction. */
14467 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14468 }
14469 else if (!((op2 & 0x67) ^ 0x03))
14470 {
14471 /* Load halfword, memory hints instruction. */
14472 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14473 }
14474 else if (!((op2 & 0x67) ^ 0x05))
14475 {
14476 /* Load word instruction. */
14477 return thumb2_record_ld_word (thumb2_insn_r);
14478 }
14479 else if (!((op2 & 0x70) ^ 0x20))
14480 {
14481 /* Data-processing (register) instruction. */
14482 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14483 }
14484 else if (!((op2 & 0x78) ^ 0x30))
14485 {
14486 /* Multiply, multiply accumulate, abs diff instruction. */
14487 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14488 }
14489 else if (!((op2 & 0x78) ^ 0x38))
14490 {
14491 /* Long multiply, long multiply accumulate, and divide. */
14492 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14493 }
14494 else if (op2 & 0x40)
14495 {
14496 /* Co-processor instructions. */
14497 return thumb2_record_coproc_insn (thumb2_insn_r);
14498 }
14499 }
14500
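/* None of the top-level encodings matched; report failure so that
   decode_insn marks the instruction as unsupported.  */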
14501 return -1;
14502 }
14503
14504 namespace {
14505 /* Abstract instruction reader. */
14506
14507 class abstract_instruction_reader
14508 {
14509 public:
14510 /* Read one instruction of size LEN from address MEMADDR, using
14511 BYTE_ORDER endianness. */
14512
14513 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len,
14514 enum bfd_endian byte_order) = 0;
14515 };
14516
14517 /* Instruction reader from real target. */
14518
14519 class instruction_reader : public abstract_instruction_reader
14520 {
14521 public:
14522 ULONGEST read (CORE_ADDR memaddr, const size_t len,
14523 enum bfd_endian byte_order) override
14524 {
14525 return read_code_unsigned_integer (memaddr, len, byte_order);
14526 }
14527 };
14528
14529 } // namespace
14530
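/* Handler type for the per-encoding record routines dispatched from
   decode_insn below.  */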
14531 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14532
14533 /* Decode an ARM/Thumb instruction based on its condition codes and
14534 opcodes, and dispatch it to the matching record handler. */
14535
14536 static int
14537 decode_insn (abstract_instruction_reader &reader,
14538 arm_insn_decode_record *arm_record,
14539 record_type_t record_type, uint32_t insn_size)
14540 {
14541
14542 /* Bits 25, 26 and 27 (counting from bit 0) select the type of ARM
14543 instruction. */
14544 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14545 {
14546 arm_record_data_proc_misc_ld_str, /* 000. */
14547 arm_record_data_proc_imm, /* 001. */
14548 arm_record_ld_st_imm_offset, /* 010. */
14549 arm_record_ld_st_reg_offset, /* 011. */
14550 arm_record_ld_st_multiple, /* 100. */
14551 arm_record_b_bl, /* 101. */
14552 arm_record_asimd_vfp_coproc, /* 110. */
14553 arm_record_coproc_data_proc /* 111. */
14554 };
14555
14556 /* Bits 13, 14 and 15 (counting from bit 0) select the type of Thumb
14557 instruction. */
14558 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14559 {
14560 thumb_record_shift_add_sub, /* 000. */
14561 thumb_record_add_sub_cmp_mov, /* 001. */
14562 thumb_record_ld_st_reg_offset, /* 010. */
14563 thumb_record_ld_st_imm_offset, /* 011. */
14564 thumb_record_ld_st_stack, /* 100. */
14565 thumb_record_misc, /* 101. */
14566 thumb_record_ldm_stm_swi, /* 110. */
14567 thumb_record_branch /* 111. */
14568 };
14569
14570 uint32_t ret = 0; /* return value: negative:failure 0:success. */
14571 uint32_t insn_id = 0;
14572 enum bfd_endian code_endian
14573 = gdbarch_byte_order_for_code (arm_record->gdbarch);
14574 arm_record->arm_insn
14575 = reader.read (arm_record->this_addr, insn_size, code_endian);
14576
14577 if (ARM_RECORD == record_type)
14578 {
14579 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14580 insn_id = bits (arm_record->arm_insn, 25, 27);
14581
14582 if (arm_record->cond == 0xf)
14583 ret = arm_record_extension_space (arm_record);
14584 else
14585 {
14586 /* The unconditional (extension space) case was handled above;
14587 otherwise dispatch on the instruction type bits. */
14588 ret = arm_handle_insn[insn_id] (arm_record);
14589 }
14590 if (ret != ARM_RECORD_SUCCESS)
14591 {
14592 arm_record_unsupported_insn (arm_record);
14593 ret = -1;
14594 }
14595 }
14596 else if (THUMB_RECORD == record_type)
14597 {
14598 /* Thumb instructions have no condition codes, so set COND to -1. */
14599 arm_record->cond = -1;
14600 insn_id = bits (arm_record->arm_insn, 13, 15);
14601 ret = thumb_handle_insn[insn_id] (arm_record);
14602 if (ret != ARM_RECORD_SUCCESS)
14603 {
14604 arm_record_unsupported_insn (arm_record);
14605 ret = -1;
14606 }
14607 }
14608 else if (THUMB2_RECORD == record_type)
14609 {
14610 /* Thumb instructions have no condition codes, so set COND to -1. */
14611 arm_record->cond = -1;
14612
14613 /* Swap the two halfwords of the 32-bit Thumb instruction so the leading halfword ends up in the upper 16 bits. */
14614 arm_record->arm_insn
14615 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
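/* For example, the encoding used by the selftests later in this file,
   "mrc 15, 0, r7, cr13, cr0, {3}", is stored as the halfwords 0xee1d
   0x7f70; a little-endian code read yields 0x7f70ee1d, and the swap
   produces 0xee1d7f70, with the leading halfword in the high bits as
   thumb2_record_decode_insn_handler expects.  */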
14616
14617 ret = thumb2_record_decode_insn_handler (arm_record);
14618
14619 if (ret != ARM_RECORD_SUCCESS)
14620 {
14621 arm_record_unsupported_insn (arm_record);
14622 ret = -1;
14623 }
14624 }
14625 else
14626 {
14627 /* Unknown record type. */
14628 gdb_assert_not_reached ("not a valid instruction, could not decode");
14629 }
14630
14631 return ret;
14632 }
14633
14634 #if GDB_SELF_TEST
14635 namespace selftests {
14636
14637 /* Instruction reader class for selftests.
14638
14639 For 16-bit Thumb instructions, an array of uint16_t should be used.
14640
14641 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array
14642 of uint32_t should be used. */
14643
14644 template<typename T>
14645 class instruction_reader_selftest : public abstract_instruction_reader
14646 {
14647 public:
14648 template<size_t SIZE>
14649 instruction_reader_selftest (const T (&insns)[SIZE])
14650 : m_insns (insns), m_insns_size (SIZE)
14651 {}
14652
14653 ULONGEST read (CORE_ADDR memaddr, const size_t length,
14654 enum bfd_endian byte_order) override
14655 {
14656 SELF_CHECK (length == sizeof (T));
14657 SELF_CHECK (memaddr % sizeof (T) == 0);
14658 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size);
14659
14660 return m_insns[memaddr / sizeof (T)];
14661 }
14662
14663 private:
14664 const T *m_insns;
14665 const size_t m_insns_size;
14666 };
14667
14668 static void
14669 arm_record_test (void)
14670 {
14671 struct gdbarch_info info;
14672 info.bfd_arch_info = bfd_scan_arch ("arm");
14673
14674 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14675
14676 SELF_CHECK (gdbarch != NULL);
14677
14678 /* 16-bit Thumb instructions. */
14679 {
14680 arm_insn_decode_record arm_record;
14681
14682 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14683 arm_record.gdbarch = gdbarch;
14684
14685 /* Use the endian-free representation of the instructions here. The test
14686 will handle endianness conversions. */
14687 static const uint16_t insns[] = {
14688 /* db b2 uxtb r3, r3 */
14689 0xb2db,
14690 /* cd 58 ldr r5, [r1, r3] */
14691 0x58cd,
14692 };
14693
14694 instruction_reader_selftest<uint16_t> reader (insns);
14695 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14696 THUMB_INSN_SIZE_BYTES);
14697
14698 SELF_CHECK (ret == 0);
14699 SELF_CHECK (arm_record.mem_rec_count == 0);
14700 SELF_CHECK (arm_record.reg_rec_count == 1);
14701 SELF_CHECK (arm_record.arm_regs[0] == 3);
14702
14703 arm_record.this_addr += 2;
14704 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14705 THUMB_INSN_SIZE_BYTES);
14706
14707 SELF_CHECK (ret == 0);
14708 SELF_CHECK (arm_record.mem_rec_count == 0);
14709 SELF_CHECK (arm_record.reg_rec_count == 1);
14710 SELF_CHECK (arm_record.arm_regs[0] == 5);
14711 }
14712
14713 /* 32-bit Thumb-2 instructions. */
14714 {
14715 arm_insn_decode_record arm_record;
14716
14717 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14718 arm_record.gdbarch = gdbarch;
14719
14720 /* Use the endian-free representation of the instruction here. The test
14721 will handle endianness conversions. */
14722 static const uint32_t insns[] = {
14723 /* mrc 15, 0, r7, cr13, cr0, {3} */
14724 0x7f70ee1d,
14725 };
14726
14727 instruction_reader_selftest<uint32_t> reader (insns);
14728 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14729 THUMB2_INSN_SIZE_BYTES);
14730
14731 SELF_CHECK (ret == 0);
14732 SELF_CHECK (arm_record.mem_rec_count == 0);
14733 SELF_CHECK (arm_record.reg_rec_count == 1);
14734 SELF_CHECK (arm_record.arm_regs[0] == 7);
14735 }
14736
14737 /* 32-bit instructions. */
14738 {
14739 arm_insn_decode_record arm_record;
14740
14741 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14742 arm_record.gdbarch = gdbarch;
14743
14744 /* Use the endian-free representation of the instruction here. The test
14745 will handle endianness conversions. */
14746 static const uint32_t insns[] = {
14747 /* mov r5, r0 */
14748 0xe1a05000,
14749 };
14750
14751 instruction_reader_selftest<uint32_t> reader (insns);
14752 int ret = decode_insn (reader, &arm_record, ARM_RECORD,
14753 ARM_INSN_SIZE_BYTES);
14754
14755 SELF_CHECK (ret == 0);
14756 }
14757 }
14758
14759 /* Instruction reader from manually cooked instruction sequences. */
14760
14761 class test_arm_instruction_reader : public arm_instruction_reader
14762 {
14763 public:
14764 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14765 : m_insns (insns)
14766 {}
14767
14768 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14769 {
14770 SELF_CHECK (memaddr % 4 == 0);
14771 SELF_CHECK (memaddr / 4 < m_insns.size ());
14772
14773 return m_insns[memaddr / 4];
14774 }
14775
14776 private:
14777 const gdb::array_view<const uint32_t> m_insns;
14778 };
14779
14780 static void
14781 arm_analyze_prologue_test ()
14782 {
14783 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14784 {
14785 struct gdbarch_info info;
14786 info.byte_order = endianness;
14787 info.byte_order_for_code = endianness;
14788 info.bfd_arch_info = bfd_scan_arch ("arm");
14789
14790 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14791
14792 SELF_CHECK (gdbarch != NULL);
14793
14794 /* The "sub" instruction contains an immediate value rotate count of 0,
14795 which resulted in a 32-bit shift of a 32-bit value, caught by
14796 UBSan. */
14797 const uint32_t insns[] = {
14798 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14799 0xe1a05000, /* mov r5, r0 */
14800 0xe5903020, /* ldr r3, [r0, #32] */
14801 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14802 };
14803
14804 test_arm_instruction_reader mem_reader (insns);
14805 arm_prologue_cache cache;
14806 arm_cache_init (&cache, gdbarch);
14807
14808 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14809 }
14810 }
14811
14812 } // namespace selftests
14813 #endif /* GDB_SELF_TEST */
14814
14815 /* Cleans up local record registers and memory allocations. */
14816
14817 static void
14818 deallocate_reg_mem (arm_insn_decode_record *record)
14819 {
14820 xfree (record->arm_regs);
14821 xfree (record->arm_mems);
14822 }
14823
14824
14825 /* Parse the current instruction and record the values of the registers and
14826 memory locations that it will change in the record_arch_list.
14827 Return -1 if something goes wrong. */
14828
14829 int
14830 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14831 CORE_ADDR insn_addr)
14832 {
14833
14834 uint32_t no_of_rec = 0;
14835 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
14836 ULONGEST t_bit = 0, insn_id = 0;
14837
14838 ULONGEST u_regval = 0;
14839
14840 arm_insn_decode_record arm_record;
14841
14842 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14843 arm_record.regcache = regcache;
14844 arm_record.this_addr = insn_addr;
14845 arm_record.gdbarch = gdbarch;
14846
14847
14848 if (record_debug > 1)
14849 {
14850 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14851 "addr = %s\n",
14852 paddress (gdbarch, arm_record.this_addr));
14853 }
14854
14855 instruction_reader reader;
14856 enum bfd_endian code_endian
14857 = gdbarch_byte_order_for_code (arm_record.gdbarch);
14858 arm_record.arm_insn
14859 = reader.read (arm_record.this_addr, 2, code_endian);
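/* Only the first halfword was read above; decode_insn re-reads the
   instruction at its full size once the instruction set and width are
   known.  */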
14860
14861 /* Check whether the instruction is a Thumb or an ARM one. */
14862
14863 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14864 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14865
14866
14867 if (!(u_regval & t_bit))
14868 {
14869 /* We are decoding an ARM instruction. */
14870 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14871 }
14872 else
14873 {
14874 insn_id = bits (arm_record.arm_insn, 11, 15);
14875 /* Is it a 32-bit Thumb-2 instruction? A leading halfword whose top five bits are 0b11101, 0b11110 or 0b11111 introduces one. */
14876 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14877 {
14878 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14879 THUMB2_INSN_SIZE_BYTES);
14880 }
14881 else
14882 {
14883 /* We are decoding a 16-bit Thumb instruction. */
14884 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14885 THUMB_INSN_SIZE_BYTES);
14886 }
14887 }
14888
14889 if (0 == ret)
14890 {
14891 /* Record registers. */
14892 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14893 if (arm_record.arm_regs)
14894 {
14895 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14896 {
14897 if (record_full_arch_list_add_reg
14898 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14899 ret = -1;
14900 }
14901 }
14902 /* Record memories. */
14903 if (arm_record.arm_mems)
14904 {
14905 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14906 {
14907 if (record_full_arch_list_add_mem
14908 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14909 arm_record.arm_mems[no_of_rec].len))
14910 ret = -1;
14911 }
14912 }
14913
14914 if (record_full_arch_list_add_end ())
14915 ret = -1;
14916 }
14917
14918
14919 deallocate_reg_mem (&arm_record);
14920
14921 return ret;
14922 }
14923
14924 /* See arm-tdep.h. */
14925
14926 const target_desc *
14927 arm_read_description (arm_fp_type fp_type, bool tls)
14928 {
14929 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14930
14931 if (tdesc == nullptr)
14932 {
14933 tdesc = arm_create_target_description (fp_type, tls);
14934 tdesc_arm_list[fp_type][tls] = tdesc;
14935 }
14936
14937 return tdesc;
14938 }
14939
14940 /* See arm-tdep.h. */
14941
14942 const target_desc *
14943 arm_read_mprofile_description (arm_m_profile_type m_type)
14944 {
14945 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14946
14947 if (tdesc == nullptr)
14948 {
14949 tdesc = arm_create_mprofile_target_description (m_type);
14950 tdesc_arm_mprofile_list[m_type] = tdesc;
14951 }
14952
14953 return tdesc;
14954 }