gdb: add interp::on_tsv_deleted method
[binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "language.h"
26 #include "inferior.h"
27 #include "infrun.h"
28 #include "gdbcmd.h"
29 #include "gdbcore.h"
30 #include "dis-asm.h" /* For register styles. */
31 #include "disasm.h"
32 #include "regcache.h"
33 #include "reggroups.h"
34 #include "target-float.h"
35 #include "value.h"
36 #include "arch-utils.h"
37 #include "osabi.h"
38 #include "frame-unwind.h"
39 #include "frame-base.h"
40 #include "trad-frame.h"
41 #include "objfiles.h"
42 #include "dwarf2.h"
43 #include "dwarf2/frame.h"
44 #include "gdbtypes.h"
45 #include "prologue-value.h"
46 #include "remote.h"
47 #include "target-descriptions.h"
48 #include "user-regs.h"
49 #include "observable.h"
50 #include "count-one-bits.h"
51
52 #include "arch/arm.h"
53 #include "arch/arm-get-next-pcs.h"
54 #include "arm-tdep.h"
55 #include "sim/sim-arm.h"
56
57 #include "elf-bfd.h"
58 #include "coff/internal.h"
59 #include "elf/arm.h"
60
61 #include "record.h"
62 #include "record-full.h"
63 #include <algorithm>
64
65 #include "producer.h"
66
67 #if GDB_SELF_TEST
68 #include "gdbsupport/selftest.h"
69 #endif
70
71 static bool arm_debug;
72
73 /* Print an "arm" debug statement. */
74
75 #define arm_debug_printf(fmt, ...) \
76 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
77
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 (msym)->set_target_flag_1 (true)
87
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 (msym)->target_flag_1 ()
90
91 struct arm_mapping_symbol
92 {
93 CORE_ADDR value;
94 char type;
95
96 bool operator< (const arm_mapping_symbol &other) const
97 { return this->value < other.value; }
98 };
99
100 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
101
102 struct arm_per_bfd
103 {
104 explicit arm_per_bfd (size_t num_sections)
105 : section_maps (new arm_mapping_symbol_vec[num_sections]),
106 section_maps_sorted (new bool[num_sections] ())
107 {}
108
109 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
110
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
112
113 The format is an array of vectors of arm_mapping_symbols, there is one
114 vector for each section of the objfile (the array is index by BFD section
115 index).
116
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
120
121 /* For each corresponding element of section_maps above, is this vector
122 sorted. */
123 std::unique_ptr<bool[]> section_maps_sorted;
124 };
125
126 /* Per-bfd data used for mapping symbols. */
127 static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key;
128
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element *setarmcmdlist = NULL;
131 static struct cmd_list_element *showarmcmdlist = NULL;
132
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings[] =
136 {
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
141 "vfp",
142 NULL
143 };
144
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147 static const char *current_fp_model = "auto";
148
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings[] =
151 {
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156 };
157
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160 static const char *arm_abi_string = "auto";
161
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings[] =
164 {
165 "auto",
166 "arm",
167 "thumb",
168 NULL
169 };
170
171 static const char *arm_fallback_mode_string = "auto";
172 static const char *arm_force_mode_string = "auto";
173
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
178 static const struct
179 {
180 const char *name;
181 int regnum;
182 } arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
218 { "lr", 14 },
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223 };
224
225 static const char *const arm_register_names[] =
226 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
233
234 /* Holds the current set of options to be passed to the disassembler. */
235 static char *arm_disassembler_options;
236
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles;
239
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style;
242
243 /* All possible arm target descriptors. */
244 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
245 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
246
247 /* This is used to keep the bfd arch_info in sync with the disassembly
248 style. */
249 static void set_disassembly_style_sfunc (const char *, int,
250 struct cmd_list_element *);
251 static void show_disassembly_style_sfunc (struct ui_file *, int,
252 struct cmd_list_element *,
253 const char *);
254
255 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
256 readable_regcache *regcache,
257 int regnum, gdb_byte *buf);
258 static void arm_neon_quad_write (struct gdbarch *gdbarch,
259 struct regcache *regcache,
260 int regnum, const gdb_byte *buf);
261
262 static CORE_ADDR
263 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
264
265
266 /* get_next_pcs operations. */
267 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
268 arm_get_next_pcs_read_memory_unsigned_integer,
269 arm_get_next_pcs_syscall_next_pc,
270 arm_get_next_pcs_addr_bits_remove,
271 arm_get_next_pcs_is_thumb,
272 NULL,
273 };
274
275 struct arm_prologue_cache
276 {
277 /* The stack pointer at the time this frame was created; i.e. the
278 caller's stack pointer when this function was called. It is used
279 to identify this frame. */
280 CORE_ADDR sp;
281
282 /* Additional stack pointers used by M-profile with Security extension. */
283 /* Use msp_s / psp_s to hold the values of msp / psp when there is
284 no Security extension. */
285 CORE_ADDR msp_s;
286 CORE_ADDR msp_ns;
287 CORE_ADDR psp_s;
288 CORE_ADDR psp_ns;
289
290 /* Active stack pointer. */
291 int active_sp_regnum;
292 int active_msp_regnum;
293 int active_psp_regnum;
294
295 /* The frame base for this frame is just prev_sp - frame size.
296 FRAMESIZE is the distance from the frame pointer to the
297 initial stack pointer. */
298
299 int framesize;
300
301 /* The register used to hold the frame pointer for this frame. */
302 int framereg;
303
304 /* True if the return address is signed, false otherwise. */
305 gdb::optional<bool> ra_signed_state;
306
307 /* Saved register offsets. */
308 trad_frame_saved_reg *saved_regs;
309
310 arm_prologue_cache() = default;
311 };
312
313
314 /* Reconstruct T bit in program status register from LR value. */
315
316 static inline ULONGEST
317 reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
318 {
319 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
320 if (IS_THUMB_ADDR (lr))
321 psr |= t_bit;
322 else
323 psr &= ~t_bit;
324
325 return psr;
326 }
327
328 /* Initialize CACHE fields for which zero is not adequate (CACHE is
329 expected to have been ZALLOC'ed before calling this function). */
330
331 static void
332 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
333 {
334 cache->active_sp_regnum = ARM_SP_REGNUM;
335
336 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
337 }
338
339 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
340
341 static void
342 arm_cache_init (struct arm_prologue_cache *cache, frame_info_ptr frame)
343 {
344 struct gdbarch *gdbarch = get_frame_arch (frame);
345 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
346
347 arm_cache_init (cache, gdbarch);
348 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
349
350 if (tdep->have_sec_ext)
351 {
352 const CORE_ADDR msp_val
353 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
354 const CORE_ADDR psp_val
355 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
356
357 cache->msp_s
358 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum);
359 cache->msp_ns
360 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum);
361 cache->psp_s
362 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum);
363 cache->psp_ns
364 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum);
365
366 /* Identify what msp is alias for (msp_s or msp_ns). */
367 if (msp_val == cache->msp_s)
368 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
369 else if (msp_val == cache->msp_ns)
370 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
371 else
372 {
373 warning (_("Invalid state, unable to determine msp alias, assuming "
374 "msp_s."));
375 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
376 }
377
378 /* Identify what psp is alias for (psp_s or psp_ns). */
379 if (psp_val == cache->psp_s)
380 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
381 else if (psp_val == cache->psp_ns)
382 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
383 else
384 {
385 warning (_("Invalid state, unable to determine psp alias, assuming "
386 "psp_s."));
387 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
388 }
389
390 /* Identify what sp is alias for (msp_s, msp_ns, psp_s or psp_ns). */
391 if (msp_val == cache->sp)
392 cache->active_sp_regnum = cache->active_msp_regnum;
393 else if (psp_val == cache->sp)
394 cache->active_sp_regnum = cache->active_psp_regnum;
395 else
396 {
397 warning (_("Invalid state, unable to determine sp alias, assuming "
398 "msp."));
399 cache->active_sp_regnum = cache->active_msp_regnum;
400 }
401 }
402 else if (tdep->is_m)
403 {
404 cache->msp_s
405 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
406 cache->psp_s
407 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
408
409 /* Identify what sp is alias for (msp or psp). */
410 if (cache->msp_s == cache->sp)
411 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
412 else if (cache->psp_s == cache->sp)
413 cache->active_sp_regnum = tdep->m_profile_psp_regnum;
414 else
415 {
416 warning (_("Invalid state, unable to determine sp alias, assuming "
417 "msp."));
418 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
419 }
420 }
421 else
422 {
423 cache->msp_s
424 = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
425
426 cache->active_sp_regnum = ARM_SP_REGNUM;
427 }
428 }
429
430 /* Return the requested stack pointer value (in REGNUM), taking into
431 account whether we have a Security extension or an M-profile
432 CPU. */
433
434 static CORE_ADDR
435 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
436 arm_gdbarch_tdep *tdep, int regnum)
437 {
438 if (tdep->have_sec_ext)
439 {
440 if (regnum == tdep->m_profile_msp_s_regnum)
441 return cache->msp_s;
442 if (regnum == tdep->m_profile_msp_ns_regnum)
443 return cache->msp_ns;
444 if (regnum == tdep->m_profile_psp_s_regnum)
445 return cache->psp_s;
446 if (regnum == tdep->m_profile_psp_ns_regnum)
447 return cache->psp_ns;
448 if (regnum == tdep->m_profile_msp_regnum)
449 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
450 if (regnum == tdep->m_profile_psp_regnum)
451 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
452 if (regnum == ARM_SP_REGNUM)
453 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
454 }
455 else if (tdep->is_m)
456 {
457 if (regnum == tdep->m_profile_msp_regnum)
458 return cache->msp_s;
459 if (regnum == tdep->m_profile_psp_regnum)
460 return cache->psp_s;
461 if (regnum == ARM_SP_REGNUM)
462 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
463 }
464 else if (regnum == ARM_SP_REGNUM)
465 return cache->sp;
466
467 gdb_assert_not_reached ("Invalid SP selection");
468 }
469
470 /* Return the previous stack address, depending on which SP register
471 is active. */
472
473 static CORE_ADDR
474 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
475 {
476 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
477 return val;
478 }
479
480 /* Set the active stack pointer to VAL. */
481
482 static void
483 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
484 arm_gdbarch_tdep *tdep, CORE_ADDR val)
485 {
486 if (tdep->have_sec_ext)
487 {
488 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
489 cache->msp_s = val;
490 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
491 cache->msp_ns = val;
492 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
493 cache->psp_s = val;
494 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
495 cache->psp_ns = val;
496
497 return;
498 }
499 else if (tdep->is_m)
500 {
501 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
502 cache->msp_s = val;
503 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
504 cache->psp_s = val;
505
506 return;
507 }
508 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
509 {
510 cache->sp = val;
511 return;
512 }
513
514 gdb_assert_not_reached ("Invalid SP selection");
515 }
516
517 /* Return true if REGNUM is one of the alternative stack pointers. */
518
519 static bool
520 arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum)
521 {
522 if ((regnum == tdep->m_profile_msp_regnum)
523 || (regnum == tdep->m_profile_msp_s_regnum)
524 || (regnum == tdep->m_profile_msp_ns_regnum)
525 || (regnum == tdep->m_profile_psp_regnum)
526 || (regnum == tdep->m_profile_psp_s_regnum)
527 || (regnum == tdep->m_profile_psp_ns_regnum))
528 return true;
529 else
530 return false;
531 }
532
533 /* Set the active stack pointer to SP_REGNUM. */
534
535 static void
536 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
537 arm_gdbarch_tdep *tdep, int sp_regnum)
538 {
539 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum));
540
541 if (tdep->have_sec_ext)
542 {
543 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
544 && sp_regnum != tdep->m_profile_psp_regnum);
545
546 if (sp_regnum == tdep->m_profile_msp_s_regnum
547 || sp_regnum == tdep->m_profile_psp_s_regnum)
548 {
549 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
550 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
551 }
552 else if (sp_regnum == tdep->m_profile_msp_ns_regnum
553 || sp_regnum == tdep->m_profile_psp_ns_regnum)
554 {
555 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
556 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
557 }
558 }
559
560 cache->active_sp_regnum = sp_regnum;
561 }
562
563 namespace {
564
565 /* Abstract class to read ARM instructions from memory. */
566
567 class arm_instruction_reader
568 {
569 public:
570 /* Read a 4 bytes instruction from memory using the BYTE_ORDER endianness. */
571 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
572 };
573
574 /* Read instructions from target memory. */
575
576 class target_arm_instruction_reader : public arm_instruction_reader
577 {
578 public:
579 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
580 {
581 return read_code_unsigned_integer (memaddr, 4, byte_order);
582 }
583 };
584
585 } /* namespace */
586
587 static CORE_ADDR arm_analyze_prologue
588 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
589 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
590
591 /* Architecture version for displaced stepping. This effects the behaviour of
592 certain instructions, and really should not be hard-wired. */
593
594 #define DISPLACED_STEPPING_ARCH_VERSION 5
595
596 /* See arm-tdep.h. */
597
598 bool arm_apcs_32 = true;
599 bool arm_unwind_secure_frames = true;
600
601 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
602
603 int
604 arm_psr_thumb_bit (struct gdbarch *gdbarch)
605 {
606 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
607
608 if (tdep->is_m)
609 return XPSR_T;
610 else
611 return CPSR_T;
612 }
613
614 /* Determine if the processor is currently executing in Thumb mode. */
615
616 int
617 arm_is_thumb (struct regcache *regcache)
618 {
619 ULONGEST cpsr;
620 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
621
622 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
623
624 return (cpsr & t_bit) != 0;
625 }
626
627 /* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM
628 frame. */
629
630 int
631 arm_frame_is_thumb (frame_info_ptr frame)
632 {
633 /* Check the architecture of FRAME. */
634 struct gdbarch *gdbarch = get_frame_arch (frame);
635 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm);
636
637 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
638 directly (from a signal frame or dummy frame) or by interpreting
639 the saved LR (from a prologue or DWARF frame). So consult it and
640 trust the unwinders. */
641 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
642
643 /* Find and extract the thumb bit. */
644 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
645 return (cpsr & t_bit) != 0;
646 }
647
648 /* Search for the mapping symbol covering MEMADDR. If one is found,
649 return its type. Otherwise, return 0. If START is non-NULL,
650 set *START to the location of the mapping symbol. */
651
652 static char
653 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
654 {
655 struct obj_section *sec;
656
657 /* If there are mapping symbols, consult them. */
658 sec = find_pc_section (memaddr);
659 if (sec != NULL)
660 {
661 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ());
662 if (data != NULL)
663 {
664 unsigned int section_idx = sec->the_bfd_section->index;
665 arm_mapping_symbol_vec &map
666 = data->section_maps[section_idx];
667
668 /* Sort the vector on first use. */
669 if (!data->section_maps_sorted[section_idx])
670 {
671 std::sort (map.begin (), map.end ());
672 data->section_maps_sorted[section_idx] = true;
673 }
674
675 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
676 arm_mapping_symbol_vec::const_iterator it
677 = std::lower_bound (map.begin (), map.end (), map_key);
678
679 /* std::lower_bound finds the earliest ordered insertion
680 point. If the symbol at this position starts at this exact
681 address, we use that; otherwise, the preceding
682 mapping symbol covers this address. */
683 if (it < map.end ())
684 {
685 if (it->value == map_key.value)
686 {
687 if (start)
688 *start = it->value + sec->addr ();
689 return it->type;
690 }
691 }
692
693 if (it > map.begin ())
694 {
695 arm_mapping_symbol_vec::const_iterator prev_it
696 = it - 1;
697
698 if (start)
699 *start = prev_it->value + sec->addr ();
700 return prev_it->type;
701 }
702 }
703 }
704
705 return 0;
706 }
707
708 /* Determine if the program counter specified in MEMADDR is in a Thumb
709 function. This function should be called for addresses unrelated to
710 any executing frame; otherwise, prefer arm_frame_is_thumb. */
711
712 int
713 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
714 {
715 struct bound_minimal_symbol sym;
716 char type;
717 arm_displaced_step_copy_insn_closure *dsc = nullptr;
718 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
719
720 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
721 dsc = ((arm_displaced_step_copy_insn_closure * )
722 gdbarch_displaced_step_copy_insn_closure_by_addr
723 (gdbarch, current_inferior (), memaddr));
724
725 /* If checking the mode of displaced instruction in copy area, the mode
726 should be determined by instruction on the original address. */
727 if (dsc)
728 {
729 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
730 (unsigned long) dsc->insn_addr,
731 (unsigned long) memaddr);
732 memaddr = dsc->insn_addr;
733 }
734
735 /* If bit 0 of the address is set, assume this is a Thumb address. */
736 if (IS_THUMB_ADDR (memaddr))
737 return 1;
738
739 /* If the user wants to override the symbol table, let him. */
740 if (strcmp (arm_force_mode_string, "arm") == 0)
741 return 0;
742 if (strcmp (arm_force_mode_string, "thumb") == 0)
743 return 1;
744
745 /* ARM v6-M and v7-M are always in Thumb mode. */
746 if (tdep->is_m)
747 return 1;
748
749 /* If there are mapping symbols, consult them. */
750 type = arm_find_mapping_symbol (memaddr, NULL);
751 if (type)
752 return type == 't';
753
754 /* Thumb functions have a "special" bit set in minimal symbols. */
755 sym = lookup_minimal_symbol_by_pc (memaddr);
756 if (sym.minsym)
757 return (MSYMBOL_IS_SPECIAL (sym.minsym));
758
759 /* If the user wants to override the fallback mode, let them. */
760 if (strcmp (arm_fallback_mode_string, "arm") == 0)
761 return 0;
762 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
763 return 1;
764
765 /* If we couldn't find any symbol, but we're talking to a running
766 target, then trust the current value of $cpsr. This lets
767 "display/i $pc" always show the correct mode (though if there is
768 a symbol table we will not reach here, so it still may not be
769 displayed in the mode it will be executed). */
770 if (target_has_registers ())
771 return arm_frame_is_thumb (get_current_frame ());
772
773 /* Otherwise we're out of luck; we assume ARM. */
774 return 0;
775 }
776
777 static inline bool
778 arm_m_addr_is_lockup (CORE_ADDR addr)
779 {
780 switch (addr)
781 {
782 /* Values for lockup state.
783 For more details see "B1.5.15 Unrecoverable exception cases" in
784 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or
785 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */
786 case 0xeffffffe:
787 case 0xfffffffe:
788 case 0xffffffff:
789 return true;
790
791 default:
792 /* Address is not lockup. */
793 return false;
794 }
795 }
796
797 /* Determine if the address specified equals any of these magic return
798 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
799 architectures. Also include lockup magic PC value.
800 Check also for FNC_RETURN if we have the v8-M security extension.
801
802 From ARMv6-M Reference Manual B1.5.8
803 Table B1-5 Exception return behavior
804
805 EXC_RETURN Return To Return Stack
806 0xFFFFFFF1 Handler mode Main
807 0xFFFFFFF9 Thread mode Main
808 0xFFFFFFFD Thread mode Process
809
810 From ARMv7-M Reference Manual B1.5.8
811 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
812
813 EXC_RETURN Return To Return Stack
814 0xFFFFFFF1 Handler mode Main
815 0xFFFFFFF9 Thread mode Main
816 0xFFFFFFFD Thread mode Process
817
818 Table B1-9 EXC_RETURN definition of exception return behavior, with
819 FP
820
821 EXC_RETURN Return To Return Stack Frame Type
822 0xFFFFFFE1 Handler mode Main Extended
823 0xFFFFFFE9 Thread mode Main Extended
824 0xFFFFFFED Thread mode Process Extended
825 0xFFFFFFF1 Handler mode Main Basic
826 0xFFFFFFF9 Thread mode Main Basic
827 0xFFFFFFFD Thread mode Process Basic
828
829 For more details see "B1.5.8 Exception return behavior"
830 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
831
832 From ARMv8-M Architecture Technical Reference, D1.2.95
833 FType, Mode and SPSEL bits are to be considered when the Security
834 Extension is not implemented.
835
836 EXC_RETURN Return To Return Stack Frame Type
837 0xFFFFFFA0 Handler mode Main Extended
838 0xFFFFFFA8 Thread mode Main Extended
839 0xFFFFFFAC Thread mode Process Extended
840 0xFFFFFFB0 Handler mode Main Standard
841 0xFFFFFFB8 Thread mode Main Standard
842 0xFFFFFFBC Thread mode Process Standard */
843
844 static int
845 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
846 {
847 if (arm_m_addr_is_lockup (addr))
848 return 1;
849
850 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
851 if (tdep->have_sec_ext)
852 {
853 switch ((addr & 0xff000000))
854 {
855 case 0xff000000: /* EXC_RETURN pattern. */
856 case 0xfe000000: /* FNC_RETURN pattern. */
857 return 1;
858 default:
859 return 0;
860 }
861 }
862 else
863 {
864 switch (addr)
865 {
866 /* Values from ARMv8-M Architecture Technical Reference. */
867 case 0xffffffa0:
868 case 0xffffffa8:
869 case 0xffffffac:
870 case 0xffffffb0:
871 case 0xffffffb8:
872 case 0xffffffbc:
873 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
874 the exception return behavior. */
875 case 0xffffffe1:
876 case 0xffffffe9:
877 case 0xffffffed:
878 case 0xfffffff1:
879 case 0xfffffff9:
880 case 0xfffffffd:
881 /* Address is magic. */
882 return 1;
883
884 default:
885 /* Address is not magic. */
886 return 0;
887 }
888 }
889 }
890
891 /* Remove useless bits from addresses in a running program. */
892 static CORE_ADDR
893 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
894 {
895 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
896
897 /* On M-profile devices, do not strip the low bit from EXC_RETURN
898 (the magic exception return address). */
899 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
900 return val;
901
902 if (arm_apcs_32)
903 return UNMAKE_THUMB_ADDR (val);
904 else
905 return (val & 0x03fffffc);
906 }
907
908 /* Return 1 if PC is the start of a compiler helper function which
909 can be safely ignored during prologue skipping. IS_THUMB is true
910 if the function is known to be a Thumb function due to the way it
911 is being called. */
912 static int
913 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
914 {
915 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
916 struct bound_minimal_symbol msym;
917
918 msym = lookup_minimal_symbol_by_pc (pc);
919 if (msym.minsym != NULL
920 && msym.value_address () == pc
921 && msym.minsym->linkage_name () != NULL)
922 {
923 const char *name = msym.minsym->linkage_name ();
924
925 /* The GNU linker's Thumb call stub to foo is named
926 __foo_from_thumb. */
927 if (strstr (name, "_from_thumb") != NULL)
928 name += 2;
929
930 /* On soft-float targets, __truncdfsf2 is called to convert promoted
931 arguments to their argument types in non-prototyped
932 functions. */
933 if (startswith (name, "__truncdfsf2"))
934 return 1;
935 if (startswith (name, "__aeabi_d2f"))
936 return 1;
937
938 /* Internal functions related to thread-local storage. */
939 if (startswith (name, "__tls_get_addr"))
940 return 1;
941 if (startswith (name, "__aeabi_read_tp"))
942 return 1;
943 }
944 else
945 {
946 /* If we run against a stripped glibc, we may be unable to identify
947 special functions by name. Check for one important case,
948 __aeabi_read_tp, by comparing the *code* against the default
949 implementation (this is hand-written ARM assembler in glibc). */
950
951 if (!is_thumb
952 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
953 == 0xe3e00a0f /* mov r0, #0xffff0fff */
954 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
955 == 0xe240f01f) /* sub pc, r0, #31 */
956 return 1;
957 }
958
959 return 0;
960 }
961
962 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
963 the first 16-bit of instruction, and INSN2 is the second 16-bit of
964 instruction. */
965 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
966 ((bits ((insn1), 0, 3) << 12) \
967 | (bits ((insn1), 10, 10) << 11) \
968 | (bits ((insn2), 12, 14) << 8) \
969 | bits ((insn2), 0, 7))
970
971 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
972 the 32-bit instruction. */
973 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
974 ((bits ((insn), 16, 19) << 12) \
975 | bits ((insn), 0, 11))
976
977 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
978
979 static unsigned int
980 thumb_expand_immediate (unsigned int imm)
981 {
982 unsigned int count = imm >> 7;
983
984 if (count < 8)
985 switch (count / 2)
986 {
987 case 0:
988 return imm & 0xff;
989 case 1:
990 return (imm & 0xff) | ((imm & 0xff) << 16);
991 case 2:
992 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
993 case 3:
994 return (imm & 0xff) | ((imm & 0xff) << 8)
995 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
996 }
997
998 return (0x80 | (imm & 0x7f)) << (32 - count);
999 }
1000
1001 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
1002 epilogue, 0 otherwise. */
1003
1004 static int
1005 thumb_instruction_restores_sp (unsigned short insn)
1006 {
1007 return (insn == 0x46bd /* mov sp, r7 */
1008 || (insn & 0xff80) == 0xb000 /* add sp, imm */
1009 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
1010 }
1011
1012 /* Analyze a Thumb prologue, looking for a recognizable stack frame
1013 and frame pointer. Scan until we encounter a store that could
1014 clobber the stack frame unexpectedly, or an unknown instruction.
1015 Return the last address which is definitely safe to skip for an
1016 initial breakpoint. */
1017
1018 static CORE_ADDR
1019 thumb_analyze_prologue (struct gdbarch *gdbarch,
1020 CORE_ADDR start, CORE_ADDR limit,
1021 struct arm_prologue_cache *cache)
1022 {
1023 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1024 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1025 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1026 int i;
1027 pv_t regs[16];
1028 CORE_ADDR offset;
1029 CORE_ADDR unrecognized_pc = 0;
1030
1031 for (i = 0; i < 16; i++)
1032 regs[i] = pv_register (i, 0);
1033 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1034
1035 while (start < limit)
1036 {
1037 unsigned short insn;
1038 gdb::optional<bool> ra_signed_state;
1039
1040 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
1041
1042 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
1043 {
1044 int regno;
1045 int mask;
1046
1047 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1048 break;
1049
1050 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
1051 whether to save LR (R14). */
1052 mask = (insn & 0xff) | ((insn & 0x100) << 6);
1053
1054 /* Calculate offsets of saved R0-R7 and LR. */
1055 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1056 if (mask & (1 << regno))
1057 {
1058 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1059 -4);
1060 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1061 }
1062 }
1063 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
1064 {
1065 offset = (insn & 0x7f) << 2; /* get scaled offset */
1066 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1067 -offset);
1068 }
1069 else if (thumb_instruction_restores_sp (insn))
1070 {
1071 /* Don't scan past the epilogue. */
1072 break;
1073 }
1074 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
1075 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
1076 (insn & 0xff) << 2);
1077 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
1078 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1079 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
1080 bits (insn, 6, 8));
1081 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1082 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1083 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1084 bits (insn, 0, 7));
1085 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1086 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1087 && pv_is_constant (regs[bits (insn, 3, 5)]))
1088 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1089 regs[bits (insn, 6, 8)]);
1090 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1091 && pv_is_constant (regs[bits (insn, 3, 6)]))
1092 {
1093 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1094 int rm = bits (insn, 3, 6);
1095 regs[rd] = pv_add (regs[rd], regs[rm]);
1096 }
1097 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1098 {
1099 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1100 int src_reg = (insn & 0x78) >> 3;
1101 regs[dst_reg] = regs[src_reg];
1102 }
1103 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1104 {
1105 /* Handle stores to the stack. Normally pushes are used,
1106 but with GCC -mtpcs-frame, there may be other stores
1107 in the prologue to create the frame. */
1108 int regno = (insn >> 8) & 0x7;
1109 pv_t addr;
1110
1111 offset = (insn & 0xff) << 2;
1112 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1113
1114 if (stack.store_would_trash (addr))
1115 break;
1116
1117 stack.store (addr, 4, regs[regno]);
1118 }
1119 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1120 {
1121 int rd = bits (insn, 0, 2);
1122 int rn = bits (insn, 3, 5);
1123 pv_t addr;
1124
1125 offset = bits (insn, 6, 10) << 2;
1126 addr = pv_add_constant (regs[rn], offset);
1127
1128 if (stack.store_would_trash (addr))
1129 break;
1130
1131 stack.store (addr, 4, regs[rd]);
1132 }
1133 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1134 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1135 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1136 /* Ignore stores of argument registers to the stack. */
1137 ;
1138 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1139 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1140 /* Ignore block loads from the stack, potentially copying
1141 parameters from memory. */
1142 ;
1143 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1144 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1145 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1146 /* Similarly ignore single loads from the stack. */
1147 ;
1148 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1149 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1150 /* Skip register copies, i.e. saves to another register
1151 instead of the stack. */
1152 ;
1153 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1154 /* Recognize constant loads; even with small stacks these are necessary
1155 on Thumb. */
1156 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1157 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1158 {
1159 /* Constant pool loads, for the same reason. */
1160 unsigned int constant;
1161 CORE_ADDR loc;
1162
1163 loc = start + 4 + bits (insn, 0, 7) * 4;
1164 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1165 regs[bits (insn, 8, 10)] = pv_constant (constant);
1166 }
1167 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1168 {
1169 unsigned short inst2;
1170
1171 inst2 = read_code_unsigned_integer (start + 2, 2,
1172 byte_order_for_code);
1173 uint32_t whole_insn = (insn << 16) | inst2;
1174
1175 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1176 {
1177 /* BL, BLX. Allow some special function calls when
1178 skipping the prologue; GCC generates these before
1179 storing arguments to the stack. */
1180 CORE_ADDR nextpc;
1181 int j1, j2, imm1, imm2;
1182
1183 imm1 = sbits (insn, 0, 10);
1184 imm2 = bits (inst2, 0, 10);
1185 j1 = bit (inst2, 13);
1186 j2 = bit (inst2, 11);
1187
1188 offset = ((imm1 << 12) + (imm2 << 1));
1189 offset ^= ((!j2) << 22) | ((!j1) << 23);
1190
1191 nextpc = start + 4 + offset;
1192 /* For BLX make sure to clear the low bits. */
1193 if (bit (inst2, 12) == 0)
1194 nextpc = nextpc & 0xfffffffc;
1195
1196 if (!skip_prologue_function (gdbarch, nextpc,
1197 bit (inst2, 12) != 0))
1198 break;
1199 }
1200
1201 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1202 { registers } */
1203 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1204 {
1205 pv_t addr = regs[bits (insn, 0, 3)];
1206 int regno;
1207
1208 if (stack.store_would_trash (addr))
1209 break;
1210
1211 /* Calculate offsets of saved registers. */
1212 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1213 if (inst2 & (1 << regno))
1214 {
1215 addr = pv_add_constant (addr, -4);
1216 stack.store (addr, 4, regs[regno]);
1217 }
1218
1219 if (insn & 0x0020)
1220 regs[bits (insn, 0, 3)] = addr;
1221 }
1222
1223 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1224 else if ((insn & 0xff20) == 0xed20
1225 && (inst2 & 0x0f00) == 0x0b00
1226 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1227 {
1228 /* Address SP points to. */
1229 pv_t addr = regs[bits (insn, 0, 3)];
1230
1231 /* Number of registers saved. */
1232 unsigned int number = bits (inst2, 0, 7) >> 1;
1233
1234 /* First register to save. */
1235 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1236
1237 if (stack.store_would_trash (addr))
1238 break;
1239
1240 /* Calculate offsets of saved registers. */
1241 for (; number > 0; number--)
1242 {
1243 addr = pv_add_constant (addr, -8);
1244 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1245 + vd + number, 0));
1246 }
1247
1248 /* Writeback SP to account for the saved registers. */
1249 regs[bits (insn, 0, 3)] = addr;
1250 }
1251
1252 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1253 [Rn, #+/-imm]{!} */
1254 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1255 {
1256 int regno1 = bits (inst2, 12, 15);
1257 int regno2 = bits (inst2, 8, 11);
1258 pv_t addr = regs[bits (insn, 0, 3)];
1259
1260 offset = inst2 & 0xff;
1261 if (insn & 0x0080)
1262 addr = pv_add_constant (addr, offset);
1263 else
1264 addr = pv_add_constant (addr, -offset);
1265
1266 if (stack.store_would_trash (addr))
1267 break;
1268
1269 stack.store (addr, 4, regs[regno1]);
1270 stack.store (pv_add_constant (addr, 4),
1271 4, regs[regno2]);
1272
1273 if (insn & 0x0020)
1274 regs[bits (insn, 0, 3)] = addr;
1275 }
1276
1277 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1278 && (inst2 & 0x0c00) == 0x0c00
1279 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1280 {
1281 int regno = bits (inst2, 12, 15);
1282 pv_t addr = regs[bits (insn, 0, 3)];
1283
1284 offset = inst2 & 0xff;
1285 if (inst2 & 0x0200)
1286 addr = pv_add_constant (addr, offset);
1287 else
1288 addr = pv_add_constant (addr, -offset);
1289
1290 if (stack.store_would_trash (addr))
1291 break;
1292
1293 stack.store (addr, 4, regs[regno]);
1294
1295 if (inst2 & 0x0100)
1296 regs[bits (insn, 0, 3)] = addr;
1297 }
1298
1299 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1300 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1301 {
1302 int regno = bits (inst2, 12, 15);
1303 pv_t addr;
1304
1305 offset = inst2 & 0xfff;
1306 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1307
1308 if (stack.store_would_trash (addr))
1309 break;
1310
1311 stack.store (addr, 4, regs[regno]);
1312 }
1313
1314 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1315 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1316 /* Ignore stores of argument registers to the stack. */
1317 ;
1318
1319 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1320 && (inst2 & 0x0d00) == 0x0c00
1321 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1322 /* Ignore stores of argument registers to the stack. */
1323 ;
1324
1325 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1326 { registers } */
1327 && (inst2 & 0x8000) == 0x0000
1328 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1329 /* Ignore block loads from the stack, potentially copying
1330 parameters from memory. */
1331 ;
1332
1333 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1334 [Rn, #+/-imm] */
1335 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1336 /* Similarly ignore dual loads from the stack. */
1337 ;
1338
1339 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1340 && (inst2 & 0x0d00) == 0x0c00
1341 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1342 /* Similarly ignore single loads from the stack. */
1343 ;
1344
1345 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1346 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1347 /* Similarly ignore single loads from the stack. */
1348 ;
1349
1350 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1351 && (inst2 & 0x8000) == 0x0000)
1352 {
1353 unsigned int imm = ((bits (insn, 10, 10) << 11)
1354 | (bits (inst2, 12, 14) << 8)
1355 | bits (inst2, 0, 7));
1356
1357 regs[bits (inst2, 8, 11)]
1358 = pv_add_constant (regs[bits (insn, 0, 3)],
1359 thumb_expand_immediate (imm));
1360 }
1361
1362 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1363 && (inst2 & 0x8000) == 0x0000)
1364 {
1365 unsigned int imm = ((bits (insn, 10, 10) << 11)
1366 | (bits (inst2, 12, 14) << 8)
1367 | bits (inst2, 0, 7));
1368
1369 regs[bits (inst2, 8, 11)]
1370 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1371 }
1372
1373 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1374 && (inst2 & 0x8000) == 0x0000)
1375 {
1376 unsigned int imm = ((bits (insn, 10, 10) << 11)
1377 | (bits (inst2, 12, 14) << 8)
1378 | bits (inst2, 0, 7));
1379
1380 regs[bits (inst2, 8, 11)]
1381 = pv_add_constant (regs[bits (insn, 0, 3)],
1382 - (CORE_ADDR) thumb_expand_immediate (imm));
1383 }
1384
1385 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1386 && (inst2 & 0x8000) == 0x0000)
1387 {
1388 unsigned int imm = ((bits (insn, 10, 10) << 11)
1389 | (bits (inst2, 12, 14) << 8)
1390 | bits (inst2, 0, 7));
1391
1392 regs[bits (inst2, 8, 11)]
1393 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1394 }
1395
1396 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1397 {
1398 unsigned int imm = ((bits (insn, 10, 10) << 11)
1399 | (bits (inst2, 12, 14) << 8)
1400 | bits (inst2, 0, 7));
1401
1402 regs[bits (inst2, 8, 11)]
1403 = pv_constant (thumb_expand_immediate (imm));
1404 }
1405
1406 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1407 {
1408 unsigned int imm
1409 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1410
1411 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1412 }
1413
1414 else if (insn == 0xea5f /* mov.w Rd,Rm */
1415 && (inst2 & 0xf0f0) == 0)
1416 {
1417 int dst_reg = (inst2 & 0x0f00) >> 8;
1418 int src_reg = inst2 & 0xf;
1419 regs[dst_reg] = regs[src_reg];
1420 }
1421
1422 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1423 {
1424 /* Constant pool loads. */
1425 unsigned int constant;
1426 CORE_ADDR loc;
1427
1428 offset = bits (inst2, 0, 11);
1429 if (insn & 0x0080)
1430 loc = start + 4 + offset;
1431 else
1432 loc = start + 4 - offset;
1433
1434 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1435 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1436 }
1437
1438 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1439 {
1440 /* Constant pool loads. */
1441 unsigned int constant;
1442 CORE_ADDR loc;
1443
1444 offset = bits (inst2, 0, 7) << 2;
1445 if (insn & 0x0080)
1446 loc = start + 4 + offset;
1447 else
1448 loc = start + 4 - offset;
1449
1450 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1451 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1452
1453 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1454 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1455 }
1456 /* Start of ARMv8.1-m PACBTI extension instructions. */
1457 else if (IS_PAC (whole_insn))
1458 {
1459 /* LR and SP are input registers. PAC is in R12. LR is
1460 signed from this point onwards. NOP space. */
1461 ra_signed_state = true;
1462 }
1463 else if (IS_PACBTI (whole_insn))
1464 {
1465 /* LR and SP are input registers. PAC is in R12 and PC is a
1466 valid BTI landing pad. LR is signed from this point onwards.
1467 NOP space. */
1468 ra_signed_state = true;
1469 }
1470 else if (IS_BTI (whole_insn))
1471 {
1472 /* Valid BTI landing pad. NOP space. */
1473 }
1474 else if (IS_PACG (whole_insn))
1475 {
1476 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1477 this point onwards. */
1478 ra_signed_state = true;
1479 }
1480 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1481 {
1482 /* These instructions appear close to the epilogue, when signed
1483 pointers are getting authenticated. */
1484 ra_signed_state = false;
1485 }
1486 /* End of ARMv8.1-m PACBTI extension instructions */
1487 else if (thumb2_instruction_changes_pc (insn, inst2))
1488 {
1489 /* Don't scan past anything that might change control flow. */
1490 break;
1491 }
1492 else
1493 {
1494 /* The optimizer might shove anything into the prologue,
1495 so we just skip what we don't recognize. */
1496 unrecognized_pc = start;
1497 }
1498
1499 /* Make sure we are dealing with a target that supports ARMv8.1-m
1500 PACBTI. */
1501 if (cache != nullptr && tdep->have_pacbti
1502 && ra_signed_state.has_value ())
1503 {
1504 arm_debug_printf ("Found pacbti instruction at %s",
1505 paddress (gdbarch, start));
1506 arm_debug_printf ("RA is %s",
1507 *ra_signed_state? "signed" : "not signed");
1508 cache->ra_signed_state = ra_signed_state;
1509 }
1510
1511 start += 2;
1512 }
1513 else if (thumb_instruction_changes_pc (insn))
1514 {
1515 /* Don't scan past anything that might change control flow. */
1516 break;
1517 }
1518 else
1519 {
1520 /* The optimizer might shove anything into the prologue,
1521 so we just skip what we don't recognize. */
1522 unrecognized_pc = start;
1523 }
1524
1525 start += 2;
1526 }
1527
1528 arm_debug_printf ("Prologue scan stopped at %s",
1529 paddress (gdbarch, start));
1530
1531 if (unrecognized_pc == 0)
1532 unrecognized_pc = start;
1533
1534 if (cache == NULL)
1535 return unrecognized_pc;
1536
1537 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1538 {
1539 /* Frame pointer is fp. Frame size is constant. */
1540 cache->framereg = ARM_FP_REGNUM;
1541 cache->framesize = -regs[ARM_FP_REGNUM].k;
1542 }
1543 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1544 {
1545 /* Frame pointer is r7. Frame size is constant. */
1546 cache->framereg = THUMB_FP_REGNUM;
1547 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1548 }
1549 else
1550 {
1551 /* Try the stack pointer... this is a bit desperate. */
1552 cache->framereg = ARM_SP_REGNUM;
1553 cache->framesize = -regs[ARM_SP_REGNUM].k;
1554 }
1555
1556 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1557 if (stack.find_reg (gdbarch, i, &offset))
1558 {
1559 cache->saved_regs[i].set_addr (offset);
1560 if (i == ARM_SP_REGNUM)
1561 arm_cache_set_active_sp_value(cache, tdep, offset);
1562 }
1563
1564 return unrecognized_pc;
1565 }
1566
1567
1568 /* Try to analyze the instructions starting from PC, which load symbol
1569 __stack_chk_guard. Return the address of instruction after loading this
1570 symbol, set the dest register number to *BASEREG, and set the size of
1571 instructions for loading symbol in OFFSET. Return 0 if instructions are
1572 not recognized. */
1573
1574 static CORE_ADDR
1575 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1576 unsigned int *destreg, int *offset)
1577 {
1578 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1579 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1580 unsigned int low, high, address;
1581
1582 address = 0;
1583 if (is_thumb)
1584 {
1585 unsigned short insn1
1586 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1587
1588 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1589 {
1590 *destreg = bits (insn1, 8, 10);
1591 *offset = 2;
1592 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1593 address = read_memory_unsigned_integer (address, 4,
1594 byte_order_for_code);
1595 }
1596 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1597 {
1598 unsigned short insn2
1599 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1600
1601 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1602
1603 insn1
1604 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1605 insn2
1606 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1607
1608 /* movt Rd, #const */
1609 if ((insn1 & 0xfbc0) == 0xf2c0)
1610 {
1611 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1612 *destreg = bits (insn2, 8, 11);
1613 *offset = 8;
1614 address = (high << 16 | low);
1615 }
1616 }
1617 }
1618 else
1619 {
1620 unsigned int insn
1621 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1622
1623 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1624 {
1625 address = bits (insn, 0, 11) + pc + 8;
1626 address = read_memory_unsigned_integer (address, 4,
1627 byte_order_for_code);
1628
1629 *destreg = bits (insn, 12, 15);
1630 *offset = 4;
1631 }
1632 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1633 {
1634 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1635
1636 insn
1637 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1638
1639 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1640 {
1641 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1642 *destreg = bits (insn, 12, 15);
1643 *offset = 8;
1644 address = (high << 16 | low);
1645 }
1646 }
1647 }
1648
1649 return address;
1650 }
1651
1652 /* Try to skip a sequence of instructions used for stack protector. If PC
1653 points to the first instruction of this sequence, return the address of
1654 first instruction after this sequence, otherwise, return original PC.
1655
1656 On arm, this sequence of instructions is composed of mainly three steps,
1657 Step 1: load symbol __stack_chk_guard,
1658 Step 2: load from address of __stack_chk_guard,
1659 Step 3: store it to somewhere else.
1660
1661 Usually, instructions on step 2 and step 3 are the same on various ARM
1662 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1663 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1664 instructions in step 1 vary from different ARM architectures. On ARMv7,
1665 they are,
1666
1667 movw Rn, #:lower16:__stack_chk_guard
1668 movt Rn, #:upper16:__stack_chk_guard
1669
1670 On ARMv5t, it is,
1671
1672 ldr Rn, .Label
1673 ....
1674 .Lable:
1675 .word __stack_chk_guard
1676
1677 Since ldr/str is a very popular instruction, we can't use them as
1678 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1679 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1680 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1681
1682 static CORE_ADDR
1683 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1684 {
1685 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1686 unsigned int basereg;
1687 struct bound_minimal_symbol stack_chk_guard;
1688 int offset;
1689 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1690 CORE_ADDR addr;
1691
1692 /* Try to parse the instructions in Step 1. */
1693 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1694 &basereg, &offset);
1695 if (!addr)
1696 return pc;
1697
1698 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1699 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1700 Otherwise, this sequence cannot be for stack protector. */
1701 if (stack_chk_guard.minsym == NULL
1702 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1703 return pc;
1704
1705 if (is_thumb)
1706 {
1707 unsigned int destreg;
1708 unsigned short insn
1709 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1710
1711 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1712 if ((insn & 0xf800) != 0x6800)
1713 return pc;
1714 if (bits (insn, 3, 5) != basereg)
1715 return pc;
1716 destreg = bits (insn, 0, 2);
1717
1718 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1719 byte_order_for_code);
1720 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1721 if ((insn & 0xf800) != 0x6000)
1722 return pc;
1723 if (destreg != bits (insn, 0, 2))
1724 return pc;
1725 }
1726 else
1727 {
1728 unsigned int destreg;
1729 unsigned int insn
1730 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1731
1732 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1733 if ((insn & 0x0e500000) != 0x04100000)
1734 return pc;
1735 if (bits (insn, 16, 19) != basereg)
1736 return pc;
1737 destreg = bits (insn, 12, 15);
1738 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1739 insn = read_code_unsigned_integer (pc + offset + 4,
1740 4, byte_order_for_code);
1741 if ((insn & 0x0e500000) != 0x04000000)
1742 return pc;
1743 if (bits (insn, 12, 15) != destreg)
1744 return pc;
1745 }
1746 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1747 on arm. */
1748 if (is_thumb)
1749 return pc + offset + 4;
1750 else
1751 return pc + offset + 8;
1752 }
1753
1754 /* Advance the PC across any function entry prologue instructions to
1755 reach some "real" code.
1756
1757 The APCS (ARM Procedure Call Standard) defines the following
1758 prologue:
1759
1760 mov ip, sp
1761 [stmfd sp!, {a1,a2,a3,a4}]
1762 stmfd sp!, {...,fp,ip,lr,pc}
1763 [stfe f7, [sp, #-12]!]
1764 [stfe f6, [sp, #-12]!]
1765 [stfe f5, [sp, #-12]!]
1766 [stfe f4, [sp, #-12]!]
1767 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1768
1769 static CORE_ADDR
1770 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1771 {
1772 CORE_ADDR func_addr, limit_pc;
1773
1774 /* See if we can determine the end of the prologue via the symbol table.
1775 If so, then return either PC, or the PC after the prologue, whichever
1776 is greater. */
1777 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1778 {
1779 CORE_ADDR post_prologue_pc
1780 = skip_prologue_using_sal (gdbarch, func_addr);
1781 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1782
1783 if (post_prologue_pc)
1784 post_prologue_pc
1785 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1786
1787
1788 /* GCC always emits a line note before the prologue and another
1789 one after, even if the two are at the same address or on the
1790 same line. Take advantage of this so that we do not need to
1791 know every instruction that might appear in the prologue. We
1792 will have producer information for most binaries; if it is
1793 missing (e.g. for -gstabs), assuming the GNU tools. */
1794 if (post_prologue_pc
1795 && (cust == NULL
1796 || cust->producer () == NULL
1797 || startswith (cust->producer (), "GNU ")
1798 || producer_is_llvm (cust->producer ())))
1799 return post_prologue_pc;
1800
1801 if (post_prologue_pc != 0)
1802 {
1803 CORE_ADDR analyzed_limit;
1804
1805 /* For non-GCC compilers, make sure the entire line is an
1806 acceptable prologue; GDB will round this function's
1807 return value up to the end of the following line so we
1808 can not skip just part of a line (and we do not want to).
1809
1810 RealView does not treat the prologue specially, but does
1811 associate prologue code with the opening brace; so this
1812 lets us skip the first line if we think it is the opening
1813 brace. */
1814 if (arm_pc_is_thumb (gdbarch, func_addr))
1815 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1816 post_prologue_pc, NULL);
1817 else
1818 analyzed_limit
1819 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1820 NULL, target_arm_instruction_reader ());
1821
1822 if (analyzed_limit != post_prologue_pc)
1823 return func_addr;
1824
1825 return post_prologue_pc;
1826 }
1827 }
1828
1829 /* Can't determine prologue from the symbol table, need to examine
1830 instructions. */
1831
1832 /* Find an upper limit on the function prologue using the debug
1833 information. If the debug information could not be used to provide
1834 that bound, then use an arbitrary large number as the upper bound. */
1835 /* Like arm_scan_prologue, stop no later than pc + 64. */
1836 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1837 if (limit_pc == 0)
1838 limit_pc = pc + 64; /* Magic. */
1839
1840
1841 /* Check if this is Thumb code. */
1842 if (arm_pc_is_thumb (gdbarch, pc))
1843 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1844 else
1845 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1846 target_arm_instruction_reader ());
1847 }
1848
1849 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1850 This function decodes a Thumb function prologue to determine:
1851 1) the size of the stack frame
1852 2) which registers are saved on it
1853 3) the offsets of saved regs
1854 4) the offset from the stack pointer to the frame pointer
1855
1856 A typical Thumb function prologue would create this stack frame
1857 (offsets relative to FP)
1858 old SP -> 24 stack parameters
1859 20 LR
1860 16 R7
1861 R7 -> 0 local variables (16 bytes)
1862 SP -> -12 additional stack space (12 bytes)
1863 The frame size would thus be 36 bytes, and the frame offset would be
1864 12 bytes. The frame register is R7.
1865
1866 The comments for thumb_skip_prolog() describe the algorithm we use
1867 to detect the end of the prolog. */
1868
1869 static void
1870 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1871 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1872 {
1873 CORE_ADDR prologue_start;
1874 CORE_ADDR prologue_end;
1875
1876 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1877 &prologue_end))
1878 {
1879 /* See comment in arm_scan_prologue for an explanation of
1880 this heuristics. */
1881 if (prologue_end > prologue_start + 64)
1882 {
1883 prologue_end = prologue_start + 64;
1884 }
1885 }
1886 else
1887 /* We're in the boondocks: we have no idea where the start of the
1888 function is. */
1889 return;
1890
1891 prologue_end = std::min (prologue_end, prev_pc);
1892
1893 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1894 }
1895
1896 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1897 otherwise. */
1898
1899 static int
1900 arm_instruction_restores_sp (unsigned int insn)
1901 {
1902 if (bits (insn, 28, 31) != INST_NV)
1903 {
1904 if ((insn & 0x0df0f000) == 0x0080d000
1905 /* ADD SP (register or immediate). */
1906 || (insn & 0x0df0f000) == 0x0040d000
1907 /* SUB SP (register or immediate). */
1908 || (insn & 0x0ffffff0) == 0x01a0d000
1909 /* MOV SP. */
1910 || (insn & 0x0fff0000) == 0x08bd0000
1911 /* POP (LDMIA). */
1912 || (insn & 0x0fff0000) == 0x049d0000)
1913 /* POP of a single register. */
1914 return 1;
1915 }
1916
1917 return 0;
1918 }
1919
1920 /* Implement immediate value decoding, as described in section A5.2.4
1921 (Modified immediate constants in ARM instructions) of the ARM Architecture
1922 Reference Manual (ARMv7-A and ARMv7-R edition). */
1923
1924 static uint32_t
1925 arm_expand_immediate (uint32_t imm)
1926 {
1927 /* Immediate values are 12 bits long. */
1928 gdb_assert ((imm & 0xfffff000) == 0);
1929
1930 uint32_t unrotated_value = imm & 0xff;
1931 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1932
1933 if (rotate_amount == 0)
1934 return unrotated_value;
1935
1936 return ((unrotated_value >> rotate_amount)
1937 | (unrotated_value << (32 - rotate_amount)));
1938 }
1939
1940 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1941 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1942 fill it in. Return the first address not recognized as a prologue
1943 instruction.
1944
1945 We recognize all the instructions typically found in ARM prologues,
1946 plus harmless instructions which can be skipped (either for analysis
1947 purposes, or a more restrictive set that can be skipped when finding
1948 the end of the prologue). */
1949
1950 static CORE_ADDR
1951 arm_analyze_prologue (struct gdbarch *gdbarch,
1952 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1953 struct arm_prologue_cache *cache,
1954 const arm_instruction_reader &insn_reader)
1955 {
1956 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1957 int regno;
1958 CORE_ADDR offset, current_pc;
1959 pv_t regs[ARM_FPS_REGNUM];
1960 CORE_ADDR unrecognized_pc = 0;
1961 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1962
1963 /* Search the prologue looking for instructions that set up the
1964 frame pointer, adjust the stack pointer, and save registers.
1965
1966 Be careful, however, and if it doesn't look like a prologue,
1967 don't try to scan it. If, for instance, a frameless function
1968 begins with stmfd sp!, then we will tell ourselves there is
1969 a frame, which will confuse stack traceback, as well as "finish"
1970 and other operations that rely on a knowledge of the stack
1971 traceback. */
1972
1973 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1974 regs[regno] = pv_register (regno, 0);
1975 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1976
1977 for (current_pc = prologue_start;
1978 current_pc < prologue_end;
1979 current_pc += 4)
1980 {
1981 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1982
1983 if (insn == 0xe1a0c00d) /* mov ip, sp */
1984 {
1985 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1986 continue;
1987 }
1988 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1989 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1990 {
1991 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1992 int rd = bits (insn, 12, 15);
1993 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1994 continue;
1995 }
1996 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1997 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1998 {
1999 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2000 int rd = bits (insn, 12, 15);
2001 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
2002 continue;
2003 }
2004 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
2005 [sp, #-4]! */
2006 {
2007 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2008 break;
2009 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2010 stack.store (regs[ARM_SP_REGNUM], 4,
2011 regs[bits (insn, 12, 15)]);
2012 continue;
2013 }
2014 else if ((insn & 0xffff0000) == 0xe92d0000)
2015 /* stmfd sp!, {..., fp, ip, lr, pc}
2016 or
2017 stmfd sp!, {a1, a2, a3, a4} */
2018 {
2019 int mask = insn & 0xffff;
2020
2021 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2022 break;
2023
2024 /* Calculate offsets of saved registers. */
2025 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
2026 if (mask & (1 << regno))
2027 {
2028 regs[ARM_SP_REGNUM]
2029 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2030 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
2031 }
2032 }
2033 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
2034 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
2035 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
2036 {
2037 /* No need to add this to saved_regs -- it's just an arg reg. */
2038 continue;
2039 }
2040 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
2041 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
2042 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
2043 {
2044 /* No need to add this to saved_regs -- it's just an arg reg. */
2045 continue;
2046 }
2047 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
2048 { registers } */
2049 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2050 {
2051 /* No need to add this to saved_regs -- it's just arg regs. */
2052 continue;
2053 }
2054 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
2055 {
2056 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2057 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
2058 }
2059 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
2060 {
2061 uint32_t imm = arm_expand_immediate(insn & 0xfff);
2062 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
2063 }
2064 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
2065 [sp, -#c]! */
2066 && tdep->have_fpa_registers)
2067 {
2068 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2069 break;
2070
2071 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2072 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
2073 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
2074 }
2075 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
2076 [sp!] */
2077 && tdep->have_fpa_registers)
2078 {
2079 int n_saved_fp_regs;
2080 unsigned int fp_start_reg, fp_bound_reg;
2081
2082 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2083 break;
2084
2085 if ((insn & 0x800) == 0x800) /* N0 is set */
2086 {
2087 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2088 n_saved_fp_regs = 3;
2089 else
2090 n_saved_fp_regs = 1;
2091 }
2092 else
2093 {
2094 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2095 n_saved_fp_regs = 2;
2096 else
2097 n_saved_fp_regs = 4;
2098 }
2099
2100 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2101 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2102 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2103 {
2104 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2105 stack.store (regs[ARM_SP_REGNUM], 12,
2106 regs[fp_start_reg++]);
2107 }
2108 }
2109 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2110 {
2111 /* Allow some special function calls when skipping the
2112 prologue; GCC generates these before storing arguments to
2113 the stack. */
2114 CORE_ADDR dest = BranchDest (current_pc, insn);
2115
2116 if (skip_prologue_function (gdbarch, dest, 0))
2117 continue;
2118 else
2119 break;
2120 }
2121 else if ((insn & 0xf0000000) != 0xe0000000)
2122 break; /* Condition not true, exit early. */
2123 else if (arm_instruction_changes_pc (insn))
2124 /* Don't scan past anything that might change control flow. */
2125 break;
2126 else if (arm_instruction_restores_sp (insn))
2127 {
2128 /* Don't scan past the epilogue. */
2129 break;
2130 }
2131 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2132 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2133 /* Ignore block loads from the stack, potentially copying
2134 parameters from memory. */
2135 continue;
2136 else if ((insn & 0xfc500000) == 0xe4100000
2137 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2138 /* Similarly ignore single loads from the stack. */
2139 continue;
2140 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2141 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2142 register instead of the stack. */
2143 continue;
2144 else
2145 {
2146 /* The optimizer might shove anything into the prologue, if
2147 we build up cache (cache != NULL) from scanning prologue,
2148 we just skip what we don't recognize and scan further to
2149 make cache as complete as possible. However, if we skip
2150 prologue, we'll stop immediately on unrecognized
2151 instruction. */
2152 unrecognized_pc = current_pc;
2153 if (cache != NULL)
2154 continue;
2155 else
2156 break;
2157 }
2158 }
2159
2160 if (unrecognized_pc == 0)
2161 unrecognized_pc = current_pc;
2162
2163 if (cache)
2164 {
2165 int framereg, framesize;
2166
2167 /* The frame size is just the distance from the frame register
2168 to the original stack pointer. */
2169 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2170 {
2171 /* Frame pointer is fp. */
2172 framereg = ARM_FP_REGNUM;
2173 framesize = -regs[ARM_FP_REGNUM].k;
2174 }
2175 else
2176 {
2177 /* Try the stack pointer... this is a bit desperate. */
2178 framereg = ARM_SP_REGNUM;
2179 framesize = -regs[ARM_SP_REGNUM].k;
2180 }
2181
2182 cache->framereg = framereg;
2183 cache->framesize = framesize;
2184
2185 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2186 if (stack.find_reg (gdbarch, regno, &offset))
2187 {
2188 cache->saved_regs[regno].set_addr (offset);
2189 if (regno == ARM_SP_REGNUM)
2190 arm_cache_set_active_sp_value(cache, tdep, offset);
2191 }
2192 }
2193
2194 arm_debug_printf ("Prologue scan stopped at %s",
2195 paddress (gdbarch, unrecognized_pc));
2196
2197 return unrecognized_pc;
2198 }
2199
2200 static void
2201 arm_scan_prologue (frame_info_ptr this_frame,
2202 struct arm_prologue_cache *cache)
2203 {
2204 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2205 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2206 CORE_ADDR prologue_start, prologue_end;
2207 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2208 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2209 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2210
2211 /* Assume there is no frame until proven otherwise. */
2212 cache->framereg = ARM_SP_REGNUM;
2213 cache->framesize = 0;
2214
2215 /* Check for Thumb prologue. */
2216 if (arm_frame_is_thumb (this_frame))
2217 {
2218 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2219 return;
2220 }
2221
2222 /* Find the function prologue. If we can't find the function in
2223 the symbol table, peek in the stack frame to find the PC. */
2224 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2225 &prologue_end))
2226 {
2227 /* One way to find the end of the prologue (which works well
2228 for unoptimized code) is to do the following:
2229
2230 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2231
2232 if (sal.line == 0)
2233 prologue_end = prev_pc;
2234 else if (sal.end < prologue_end)
2235 prologue_end = sal.end;
2236
2237 This mechanism is very accurate so long as the optimizer
2238 doesn't move any instructions from the function body into the
2239 prologue. If this happens, sal.end will be the last
2240 instruction in the first hunk of prologue code just before
2241 the first instruction that the scheduler has moved from
2242 the body to the prologue.
2243
2244 In order to make sure that we scan all of the prologue
2245 instructions, we use a slightly less accurate mechanism which
2246 may scan more than necessary. To help compensate for this
2247 lack of accuracy, the prologue scanning loop below contains
2248 several clauses which'll cause the loop to terminate early if
2249 an implausible prologue instruction is encountered.
2250
2251 The expression
2252
2253 prologue_start + 64
2254
2255 is a suitable endpoint since it accounts for the largest
2256 possible prologue plus up to five instructions inserted by
2257 the scheduler. */
2258
2259 if (prologue_end > prologue_start + 64)
2260 {
2261 prologue_end = prologue_start + 64; /* See above. */
2262 }
2263 }
2264 else
2265 {
2266 /* We have no symbol information. Our only option is to assume this
2267 function has a standard stack frame and the normal frame register.
2268 Then, we can find the value of our frame pointer on entrance to
2269 the callee (or at the present moment if this is the innermost frame).
2270 The value stored there should be the address of the stmfd + 8. */
2271 CORE_ADDR frame_loc;
2272 ULONGEST return_value;
2273
2274 /* AAPCS does not use a frame register, so we can abort here. */
2275 if (tdep->arm_abi == ARM_ABI_AAPCS)
2276 return;
2277
2278 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2279 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2280 &return_value))
2281 return;
2282 else
2283 {
2284 prologue_start = gdbarch_addr_bits_remove
2285 (gdbarch, return_value) - 8;
2286 prologue_end = prologue_start + 64; /* See above. */
2287 }
2288 }
2289
2290 if (prev_pc < prologue_end)
2291 prologue_end = prev_pc;
2292
2293 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2294 target_arm_instruction_reader ());
2295 }
2296
2297 static struct arm_prologue_cache *
2298 arm_make_prologue_cache (frame_info_ptr this_frame)
2299 {
2300 int reg;
2301 struct arm_prologue_cache *cache;
2302 CORE_ADDR unwound_fp, prev_sp;
2303
2304 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2305 arm_cache_init (cache, this_frame);
2306
2307 arm_scan_prologue (this_frame, cache);
2308
2309 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2310 if (unwound_fp == 0)
2311 return cache;
2312
2313 arm_gdbarch_tdep *tdep =
2314 gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2315
2316 prev_sp = unwound_fp + cache->framesize;
2317 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2318
2319 /* Calculate actual addresses of saved registers using offsets
2320 determined by arm_scan_prologue. */
2321 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2322 if (cache->saved_regs[reg].is_addr ())
2323 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () +
2324 prev_sp);
2325
2326 return cache;
2327 }
2328
2329 /* Implementation of the stop_reason hook for arm_prologue frames. */
2330
2331 static enum unwind_stop_reason
2332 arm_prologue_unwind_stop_reason (frame_info_ptr this_frame,
2333 void **this_cache)
2334 {
2335 struct arm_prologue_cache *cache;
2336 CORE_ADDR pc;
2337
2338 if (*this_cache == NULL)
2339 *this_cache = arm_make_prologue_cache (this_frame);
2340 cache = (struct arm_prologue_cache *) *this_cache;
2341
2342 /* This is meant to halt the backtrace at "_start". */
2343 pc = get_frame_pc (this_frame);
2344 gdbarch *arch = get_frame_arch (this_frame);
2345 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
2346 if (pc <= tdep->lowest_pc)
2347 return UNWIND_OUTERMOST;
2348
2349 /* If we've hit a wall, stop. */
2350 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2351 return UNWIND_OUTERMOST;
2352
2353 return UNWIND_NO_REASON;
2354 }
2355
2356 /* Our frame ID for a normal frame is the current function's starting PC
2357 and the caller's SP when we were called. */
2358
2359 static void
2360 arm_prologue_this_id (frame_info_ptr this_frame,
2361 void **this_cache,
2362 struct frame_id *this_id)
2363 {
2364 struct arm_prologue_cache *cache;
2365 struct frame_id id;
2366 CORE_ADDR pc, func;
2367
2368 if (*this_cache == NULL)
2369 *this_cache = arm_make_prologue_cache (this_frame);
2370 cache = (struct arm_prologue_cache *) *this_cache;
2371
2372 arm_gdbarch_tdep *tdep
2373 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2374
2375 /* Use function start address as part of the frame ID. If we cannot
2376 identify the start address (due to missing symbol information),
2377 fall back to just using the current PC. */
2378 pc = get_frame_pc (this_frame);
2379 func = get_frame_func (this_frame);
2380 if (!func)
2381 func = pc;
2382
2383 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2384 *this_id = id;
2385 }
2386
2387 static struct value *
2388 arm_prologue_prev_register (frame_info_ptr this_frame,
2389 void **this_cache,
2390 int prev_regnum)
2391 {
2392 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2393 struct arm_prologue_cache *cache;
2394 CORE_ADDR sp_value;
2395
2396 if (*this_cache == NULL)
2397 *this_cache = arm_make_prologue_cache (this_frame);
2398 cache = (struct arm_prologue_cache *) *this_cache;
2399
2400 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2401
2402 /* If this frame has signed the return address, mark it as so. */
2403 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2404 && *cache->ra_signed_state)
2405 set_frame_previous_pc_masked (this_frame);
2406
2407 /* If we are asked to unwind the PC, then we need to return the LR
2408 instead. The prologue may save PC, but it will point into this
2409 frame's prologue, not the next frame's resume location. Also
2410 strip the saved T bit. A valid LR may have the low bit set, but
2411 a valid PC never does. */
2412 if (prev_regnum == ARM_PC_REGNUM)
2413 {
2414 CORE_ADDR lr;
2415
2416 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2417 return frame_unwind_got_constant (this_frame, prev_regnum,
2418 arm_addr_bits_remove (gdbarch, lr));
2419 }
2420
2421 /* SP is generally not saved to the stack, but this frame is
2422 identified by the next frame's stack pointer at the time of the call.
2423 The value was already reconstructed into PREV_SP. */
2424 if (prev_regnum == ARM_SP_REGNUM)
2425 return frame_unwind_got_constant (this_frame, prev_regnum,
2426 arm_cache_get_prev_sp_value (cache, tdep));
2427
2428 /* The value might be one of the alternative SP, if so, use the
2429 value already constructed. */
2430 if (arm_is_alternative_sp_register (tdep, prev_regnum))
2431 {
2432 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2433 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2434 }
2435
2436 /* The CPSR may have been changed by the call instruction and by the
2437 called function. The only bit we can reconstruct is the T bit,
2438 by checking the low bit of LR as of the call. This is a reliable
2439 indicator of Thumb-ness except for some ARM v4T pre-interworking
2440 Thumb code, which could get away with a clear low bit as long as
2441 the called function did not use bx. Guess that all other
2442 bits are unchanged; the condition flags are presumably lost,
2443 but the processor status is likely valid. */
2444 if (prev_regnum == ARM_PS_REGNUM)
2445 {
2446 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2447 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2448
2449 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2450 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2451 }
2452
2453 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2454 prev_regnum);
2455 }
2456
2457 static frame_unwind arm_prologue_unwind = {
2458 "arm prologue",
2459 NORMAL_FRAME,
2460 arm_prologue_unwind_stop_reason,
2461 arm_prologue_this_id,
2462 arm_prologue_prev_register,
2463 NULL,
2464 default_frame_sniffer
2465 };
2466
2467 /* Maintain a list of ARM exception table entries per objfile, similar to the
2468 list of mapping symbols. We only cache entries for standard ARM-defined
2469 personality routines; the cache will contain only the frame unwinding
2470 instructions associated with the entry (not the descriptors). */
2471
2472 struct arm_exidx_entry
2473 {
2474 CORE_ADDR addr;
2475 gdb_byte *entry;
2476
2477 bool operator< (const arm_exidx_entry &other) const
2478 {
2479 return addr < other.addr;
2480 }
2481 };
2482
2483 struct arm_exidx_data
2484 {
2485 std::vector<std::vector<arm_exidx_entry>> section_maps;
2486 };
2487
2488 /* Per-BFD key to store exception handling information. */
2489 static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key;
2490
2491 static struct obj_section *
2492 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2493 {
2494 for (obj_section *osect : objfile->sections ())
2495 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2496 {
2497 bfd_vma start, size;
2498 start = bfd_section_vma (osect->the_bfd_section);
2499 size = bfd_section_size (osect->the_bfd_section);
2500
2501 if (start <= vma && vma < start + size)
2502 return osect;
2503 }
2504
2505 return NULL;
2506 }
2507
2508 /* Parse contents of exception table and exception index sections
2509 of OBJFILE, and fill in the exception table entry cache.
2510
2511 For each entry that refers to a standard ARM-defined personality
2512 routine, extract the frame unwinding instructions (from either
2513 the index or the table section). The unwinding instructions
2514 are normalized by:
2515 - extracting them from the rest of the table data
2516 - converting to host endianness
2517 - appending the implicit 0xb0 ("Finish") code
2518
2519 The extracted and normalized instructions are stored for later
2520 retrieval by the arm_find_exidx_entry routine. */
2521
2522 static void
2523 arm_exidx_new_objfile (struct objfile *objfile)
2524 {
2525 struct arm_exidx_data *data;
2526 asection *exidx, *extab;
2527 bfd_vma exidx_vma = 0, extab_vma = 0;
2528 LONGEST i;
2529
2530 /* If we've already touched this file, do nothing. */
2531 if (!objfile || arm_exidx_data_key.get (objfile->obfd.get ()) != NULL)
2532 return;
2533
2534 /* Read contents of exception table and index. */
2535 exidx = bfd_get_section_by_name (objfile->obfd.get (),
2536 ELF_STRING_ARM_unwind);
2537 gdb::byte_vector exidx_data;
2538 if (exidx)
2539 {
2540 exidx_vma = bfd_section_vma (exidx);
2541 exidx_data.resize (bfd_section_size (exidx));
2542
2543 if (!bfd_get_section_contents (objfile->obfd.get (), exidx,
2544 exidx_data.data (), 0,
2545 exidx_data.size ()))
2546 return;
2547 }
2548
2549 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab");
2550 gdb::byte_vector extab_data;
2551 if (extab)
2552 {
2553 extab_vma = bfd_section_vma (extab);
2554 extab_data.resize (bfd_section_size (extab));
2555
2556 if (!bfd_get_section_contents (objfile->obfd.get (), extab,
2557 extab_data.data (), 0,
2558 extab_data.size ()))
2559 return;
2560 }
2561
2562 /* Allocate exception table data structure. */
2563 data = arm_exidx_data_key.emplace (objfile->obfd.get ());
2564 data->section_maps.resize (objfile->obfd->section_count);
2565
2566 /* Fill in exception table. */
2567 for (i = 0; i < exidx_data.size () / 8; i++)
2568 {
2569 struct arm_exidx_entry new_exidx_entry;
2570 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2571 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2572 exidx_data.data () + i * 8 + 4);
2573 bfd_vma addr = 0, word = 0;
2574 int n_bytes = 0, n_words = 0;
2575 struct obj_section *sec;
2576 gdb_byte *entry = NULL;
2577
2578 /* Extract address of start of function. */
2579 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2580 idx += exidx_vma + i * 8;
2581
2582 /* Find section containing function and compute section offset. */
2583 sec = arm_obj_section_from_vma (objfile, idx);
2584 if (sec == NULL)
2585 continue;
2586 idx -= bfd_section_vma (sec->the_bfd_section);
2587
2588 /* Determine address of exception table entry. */
2589 if (val == 1)
2590 {
2591 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2592 }
2593 else if ((val & 0xff000000) == 0x80000000)
2594 {
2595 /* Exception table entry embedded in .ARM.exidx
2596 -- must be short form. */
2597 word = val;
2598 n_bytes = 3;
2599 }
2600 else if (!(val & 0x80000000))
2601 {
2602 /* Exception table entry in .ARM.extab. */
2603 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2604 addr += exidx_vma + i * 8 + 4;
2605
2606 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2607 {
2608 word = bfd_h_get_32 (objfile->obfd,
2609 extab_data.data () + addr - extab_vma);
2610 addr += 4;
2611
2612 if ((word & 0xff000000) == 0x80000000)
2613 {
2614 /* Short form. */
2615 n_bytes = 3;
2616 }
2617 else if ((word & 0xff000000) == 0x81000000
2618 || (word & 0xff000000) == 0x82000000)
2619 {
2620 /* Long form. */
2621 n_bytes = 2;
2622 n_words = ((word >> 16) & 0xff);
2623 }
2624 else if (!(word & 0x80000000))
2625 {
2626 bfd_vma pers;
2627 struct obj_section *pers_sec;
2628 int gnu_personality = 0;
2629
2630 /* Custom personality routine. */
2631 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2632 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2633
2634 /* Check whether we've got one of the variants of the
2635 GNU personality routines. */
2636 pers_sec = arm_obj_section_from_vma (objfile, pers);
2637 if (pers_sec)
2638 {
2639 static const char *personality[] =
2640 {
2641 "__gcc_personality_v0",
2642 "__gxx_personality_v0",
2643 "__gcj_personality_v0",
2644 "__gnu_objc_personality_v0",
2645 NULL
2646 };
2647
2648 CORE_ADDR pc = pers + pers_sec->offset ();
2649 int k;
2650
2651 for (k = 0; personality[k]; k++)
2652 if (lookup_minimal_symbol_by_pc_name
2653 (pc, personality[k], objfile))
2654 {
2655 gnu_personality = 1;
2656 break;
2657 }
2658 }
2659
2660 /* If so, the next word contains a word count in the high
2661 byte, followed by the same unwind instructions as the
2662 pre-defined forms. */
2663 if (gnu_personality
2664 && addr + 4 <= extab_vma + extab_data.size ())
2665 {
2666 word = bfd_h_get_32 (objfile->obfd,
2667 (extab_data.data ()
2668 + addr - extab_vma));
2669 addr += 4;
2670 n_bytes = 3;
2671 n_words = ((word >> 24) & 0xff);
2672 }
2673 }
2674 }
2675 }
2676
2677 /* Sanity check address. */
2678 if (n_words)
2679 if (addr < extab_vma
2680 || addr + 4 * n_words > extab_vma + extab_data.size ())
2681 n_words = n_bytes = 0;
2682
2683 /* The unwind instructions reside in WORD (only the N_BYTES least
2684 significant bytes are valid), followed by N_WORDS words in the
2685 extab section starting at ADDR. */
2686 if (n_bytes || n_words)
2687 {
2688 gdb_byte *p = entry
2689 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2690 n_bytes + n_words * 4 + 1);
2691
2692 while (n_bytes--)
2693 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2694
2695 while (n_words--)
2696 {
2697 word = bfd_h_get_32 (objfile->obfd,
2698 extab_data.data () + addr - extab_vma);
2699 addr += 4;
2700
2701 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2702 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2703 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2704 *p++ = (gdb_byte) (word & 0xff);
2705 }
2706
2707 /* Implied "Finish" to terminate the list. */
2708 *p++ = 0xb0;
2709 }
2710
2711 /* Push entry onto vector. They are guaranteed to always
2712 appear in order of increasing addresses. */
2713 new_exidx_entry.addr = idx;
2714 new_exidx_entry.entry = entry;
2715 data->section_maps[sec->the_bfd_section->index].push_back
2716 (new_exidx_entry);
2717 }
2718 }
2719
2720 /* Search for the exception table entry covering MEMADDR. If one is found,
2721 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2722 set *START to the start of the region covered by this entry. */
2723
2724 static gdb_byte *
2725 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2726 {
2727 struct obj_section *sec;
2728
2729 sec = find_pc_section (memaddr);
2730 if (sec != NULL)
2731 {
2732 struct arm_exidx_data *data;
2733 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2734
2735 data = arm_exidx_data_key.get (sec->objfile->obfd.get ());
2736 if (data != NULL)
2737 {
2738 std::vector<arm_exidx_entry> &map
2739 = data->section_maps[sec->the_bfd_section->index];
2740 if (!map.empty ())
2741 {
2742 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2743
2744 /* std::lower_bound finds the earliest ordered insertion
2745 point. If the following symbol starts at this exact
2746 address, we use that; otherwise, the preceding
2747 exception table entry covers this address. */
2748 if (idx < map.end ())
2749 {
2750 if (idx->addr == map_key.addr)
2751 {
2752 if (start)
2753 *start = idx->addr + sec->addr ();
2754 return idx->entry;
2755 }
2756 }
2757
2758 if (idx > map.begin ())
2759 {
2760 idx = idx - 1;
2761 if (start)
2762 *start = idx->addr + sec->addr ();
2763 return idx->entry;
2764 }
2765 }
2766 }
2767 }
2768
2769 return NULL;
2770 }
2771
2772 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2773 instruction list from the ARM exception table entry ENTRY, allocate and
2774 return a prologue cache structure describing how to unwind this frame.
2775
2776 Return NULL if the unwinding instruction list contains a "spare",
2777 "reserved" or "refuse to unwind" instruction as defined in section
2778 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2779 for the ARM Architecture" document. */
2780
2781 static struct arm_prologue_cache *
2782 arm_exidx_fill_cache (frame_info_ptr this_frame, gdb_byte *entry)
2783 {
2784 CORE_ADDR vsp = 0;
2785 int vsp_valid = 0;
2786
2787 struct arm_prologue_cache *cache;
2788 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2789 arm_cache_init (cache, this_frame);
2790
2791 for (;;)
2792 {
2793 gdb_byte insn;
2794
2795 /* Whenever we reload SP, we actually have to retrieve its
2796 actual value in the current frame. */
2797 if (!vsp_valid)
2798 {
2799 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2800 {
2801 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2802 vsp = get_frame_register_unsigned (this_frame, reg);
2803 }
2804 else
2805 {
2806 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2807 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2808 }
2809
2810 vsp_valid = 1;
2811 }
2812
2813 /* Decode next unwind instruction. */
2814 insn = *entry++;
2815
2816 if ((insn & 0xc0) == 0)
2817 {
2818 int offset = insn & 0x3f;
2819 vsp += (offset << 2) + 4;
2820 }
2821 else if ((insn & 0xc0) == 0x40)
2822 {
2823 int offset = insn & 0x3f;
2824 vsp -= (offset << 2) + 4;
2825 }
2826 else if ((insn & 0xf0) == 0x80)
2827 {
2828 int mask = ((insn & 0xf) << 8) | *entry++;
2829 int i;
2830
2831 /* The special case of an all-zero mask identifies
2832 "Refuse to unwind". We return NULL to fall back
2833 to the prologue analyzer. */
2834 if (mask == 0)
2835 return NULL;
2836
2837 /* Pop registers r4..r15 under mask. */
2838 for (i = 0; i < 12; i++)
2839 if (mask & (1 << i))
2840 {
2841 cache->saved_regs[4 + i].set_addr (vsp);
2842 vsp += 4;
2843 }
2844
2845 /* Special-case popping SP -- we need to reload vsp. */
2846 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2847 vsp_valid = 0;
2848 }
2849 else if ((insn & 0xf0) == 0x90)
2850 {
2851 int reg = insn & 0xf;
2852
2853 /* Reserved cases. */
2854 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2855 return NULL;
2856
2857 /* Set SP from another register and mark VSP for reload. */
2858 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2859 vsp_valid = 0;
2860 }
2861 else if ((insn & 0xf0) == 0xa0)
2862 {
2863 int count = insn & 0x7;
2864 int pop_lr = (insn & 0x8) != 0;
2865 int i;
2866
2867 /* Pop r4..r[4+count]. */
2868 for (i = 0; i <= count; i++)
2869 {
2870 cache->saved_regs[4 + i].set_addr (vsp);
2871 vsp += 4;
2872 }
2873
2874 /* If indicated by flag, pop LR as well. */
2875 if (pop_lr)
2876 {
2877 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2878 vsp += 4;
2879 }
2880 }
2881 else if (insn == 0xb0)
2882 {
2883 /* We could only have updated PC by popping into it; if so, it
2884 will show up as address. Otherwise, copy LR into PC. */
2885 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2886 cache->saved_regs[ARM_PC_REGNUM]
2887 = cache->saved_regs[ARM_LR_REGNUM];
2888
2889 /* We're done. */
2890 break;
2891 }
2892 else if (insn == 0xb1)
2893 {
2894 int mask = *entry++;
2895 int i;
2896
2897 /* All-zero mask and mask >= 16 is "spare". */
2898 if (mask == 0 || mask >= 16)
2899 return NULL;
2900
2901 /* Pop r0..r3 under mask. */
2902 for (i = 0; i < 4; i++)
2903 if (mask & (1 << i))
2904 {
2905 cache->saved_regs[i].set_addr (vsp);
2906 vsp += 4;
2907 }
2908 }
2909 else if (insn == 0xb2)
2910 {
2911 ULONGEST offset = 0;
2912 unsigned shift = 0;
2913
2914 do
2915 {
2916 offset |= (*entry & 0x7f) << shift;
2917 shift += 7;
2918 }
2919 while (*entry++ & 0x80);
2920
2921 vsp += 0x204 + (offset << 2);
2922 }
2923 else if (insn == 0xb3)
2924 {
2925 int start = *entry >> 4;
2926 int count = (*entry++) & 0xf;
2927 int i;
2928
2929 /* Only registers D0..D15 are valid here. */
2930 if (start + count >= 16)
2931 return NULL;
2932
2933 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2934 for (i = 0; i <= count; i++)
2935 {
2936 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2937 vsp += 8;
2938 }
2939
2940 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2941 vsp += 4;
2942 }
2943 else if ((insn & 0xf8) == 0xb8)
2944 {
2945 int count = insn & 0x7;
2946 int i;
2947
2948 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2949 for (i = 0; i <= count; i++)
2950 {
2951 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2952 vsp += 8;
2953 }
2954
2955 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2956 vsp += 4;
2957 }
2958 else if (insn == 0xc6)
2959 {
2960 int start = *entry >> 4;
2961 int count = (*entry++) & 0xf;
2962 int i;
2963
2964 /* Only registers WR0..WR15 are valid. */
2965 if (start + count >= 16)
2966 return NULL;
2967
2968 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2969 for (i = 0; i <= count; i++)
2970 {
2971 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2972 vsp += 8;
2973 }
2974 }
2975 else if (insn == 0xc7)
2976 {
2977 int mask = *entry++;
2978 int i;
2979
2980 /* All-zero mask and mask >= 16 is "spare". */
2981 if (mask == 0 || mask >= 16)
2982 return NULL;
2983
2984 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2985 for (i = 0; i < 4; i++)
2986 if (mask & (1 << i))
2987 {
2988 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2989 vsp += 4;
2990 }
2991 }
2992 else if ((insn & 0xf8) == 0xc0)
2993 {
2994 int count = insn & 0x7;
2995 int i;
2996
2997 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2998 for (i = 0; i <= count; i++)
2999 {
3000 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
3001 vsp += 8;
3002 }
3003 }
3004 else if (insn == 0xc8)
3005 {
3006 int start = *entry >> 4;
3007 int count = (*entry++) & 0xf;
3008 int i;
3009
3010 /* Only registers D0..D31 are valid. */
3011 if (start + count >= 16)
3012 return NULL;
3013
3014 /* Pop VFP double-precision registers
3015 D[16+start]..D[16+start+count]. */
3016 for (i = 0; i <= count; i++)
3017 {
3018 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
3019 vsp += 8;
3020 }
3021 }
3022 else if (insn == 0xc9)
3023 {
3024 int start = *entry >> 4;
3025 int count = (*entry++) & 0xf;
3026 int i;
3027
3028 /* Pop VFP double-precision registers D[start]..D[start+count]. */
3029 for (i = 0; i <= count; i++)
3030 {
3031 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
3032 vsp += 8;
3033 }
3034 }
3035 else if ((insn & 0xf8) == 0xd0)
3036 {
3037 int count = insn & 0x7;
3038 int i;
3039
3040 /* Pop VFP double-precision registers D[8]..D[8+count]. */
3041 for (i = 0; i <= count; i++)
3042 {
3043 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
3044 vsp += 8;
3045 }
3046 }
3047 else
3048 {
3049 /* Everything else is "spare". */
3050 return NULL;
3051 }
3052 }
3053
3054 /* If we restore SP from a register, assume this was the frame register.
3055 Otherwise just fall back to SP as frame register. */
3056 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
3057 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
3058 else
3059 cache->framereg = ARM_SP_REGNUM;
3060
3061 /* Determine offset to previous frame. */
3062 cache->framesize
3063 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
3064
3065 /* We already got the previous SP. */
3066 arm_gdbarch_tdep *tdep
3067 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3068 arm_cache_set_active_sp_value (cache, tdep, vsp);
3069
3070 return cache;
3071 }
3072
3073 /* Unwinding via ARM exception table entries. Note that the sniffer
3074 already computes a filled-in prologue cache, which is then used
3075 with the same arm_prologue_this_id and arm_prologue_prev_register
3076 routines also used for prologue-parsing based unwinding. */
3077
3078 static int
3079 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3080 frame_info_ptr this_frame,
3081 void **this_prologue_cache)
3082 {
3083 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3084 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3085 CORE_ADDR addr_in_block, exidx_region, func_start;
3086 struct arm_prologue_cache *cache;
3087 gdb_byte *entry;
3088
3089 /* See if we have an ARM exception table entry covering this address. */
3090 addr_in_block = get_frame_address_in_block (this_frame);
3091 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3092 if (!entry)
3093 return 0;
3094
3095 /* The ARM exception table does not describe unwind information
3096 for arbitrary PC values, but is guaranteed to be correct only
3097 at call sites. We have to decide here whether we want to use
3098 ARM exception table information for this frame, or fall back
3099 to using prologue parsing. (Note that if we have DWARF CFI,
3100 this sniffer isn't even called -- CFI is always preferred.)
3101
3102 Before we make this decision, however, we check whether we
3103 actually have *symbol* information for the current frame.
3104 If not, prologue parsing would not work anyway, so we might
3105 as well use the exception table and hope for the best. */
3106 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3107 {
3108 int exc_valid = 0;
3109
3110 /* If the next frame is "normal", we are at a call site in this
3111 frame, so exception information is guaranteed to be valid. */
3112 if (get_next_frame (this_frame)
3113 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3114 exc_valid = 1;
3115
3116 /* Some syscalls keep PC pointing to the SVC instruction itself. */
3117 for (int shift = 0; shift <= 1 && !exc_valid; ++shift)
3118 {
3119 /* We also assume exception information is valid if we're currently
3120 blocked in a system call. The system library is supposed to
3121 ensure this, so that e.g. pthread cancellation works. */
3122 if (arm_frame_is_thumb (this_frame))
3123 {
3124 ULONGEST insn;
3125
3126 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3127 - (shift ? 2 : 0)),
3128 2, byte_order_for_code,
3129 &insn)
3130 && (insn & 0xff00) == 0xdf00 /* svc */)
3131 exc_valid = 1;
3132 }
3133 else
3134 {
3135 ULONGEST insn;
3136
3137 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3138 - (shift ? 4 : 0)),
3139 4, byte_order_for_code,
3140 &insn)
3141 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3142 exc_valid = 1;
3143 }
3144 }
3145
3146 /* Bail out if we don't know that exception information is valid. */
3147 if (!exc_valid)
3148 return 0;
3149
3150 /* The ARM exception index does not mark the *end* of the region
3151 covered by the entry, and some functions will not have any entry.
3152 To correctly recognize the end of the covered region, the linker
3153 should have inserted dummy records with a CANTUNWIND marker.
3154
3155 Unfortunately, current versions of GNU ld do not reliably do
3156 this, and thus we may have found an incorrect entry above.
3157 As a (temporary) sanity check, we only use the entry if it
3158 lies *within* the bounds of the function. Note that this check
3159 might reject perfectly valid entries that just happen to cover
3160 multiple functions; therefore this check ought to be removed
3161 once the linker is fixed. */
3162 if (func_start > exidx_region)
3163 return 0;
3164 }
3165
3166 /* Decode the list of unwinding instructions into a prologue cache.
3167 Note that this may fail due to e.g. a "refuse to unwind" code. */
3168 cache = arm_exidx_fill_cache (this_frame, entry);
3169 if (!cache)
3170 return 0;
3171
3172 *this_prologue_cache = cache;
3173 return 1;
3174 }
3175
3176 struct frame_unwind arm_exidx_unwind = {
3177 "arm exidx",
3178 NORMAL_FRAME,
3179 default_frame_unwind_stop_reason,
3180 arm_prologue_this_id,
3181 arm_prologue_prev_register,
3182 NULL,
3183 arm_exidx_unwind_sniffer
3184 };
3185
3186 static struct arm_prologue_cache *
3187 arm_make_epilogue_frame_cache (frame_info_ptr this_frame)
3188 {
3189 struct arm_prologue_cache *cache;
3190 int reg;
3191
3192 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3193 arm_cache_init (cache, this_frame);
3194
3195 /* Still rely on the offset calculated from prologue. */
3196 arm_scan_prologue (this_frame, cache);
3197
3198 /* Since we are in epilogue, the SP has been restored. */
3199 arm_gdbarch_tdep *tdep
3200 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3201 arm_cache_set_active_sp_value (cache, tdep,
3202 get_frame_register_unsigned (this_frame,
3203 ARM_SP_REGNUM));
3204
3205 /* Calculate actual addresses of saved registers using offsets
3206 determined by arm_scan_prologue. */
3207 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3208 if (cache->saved_regs[reg].is_addr ())
3209 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3210 + arm_cache_get_prev_sp_value (cache, tdep));
3211
3212 return cache;
3213 }
3214
3215 /* Implementation of function hook 'this_id' in
3216 'struct frame_uwnind' for epilogue unwinder. */
3217
3218 static void
3219 arm_epilogue_frame_this_id (frame_info_ptr this_frame,
3220 void **this_cache,
3221 struct frame_id *this_id)
3222 {
3223 struct arm_prologue_cache *cache;
3224 CORE_ADDR pc, func;
3225
3226 if (*this_cache == NULL)
3227 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3228 cache = (struct arm_prologue_cache *) *this_cache;
3229
3230 /* Use function start address as part of the frame ID. If we cannot
3231 identify the start address (due to missing symbol information),
3232 fall back to just using the current PC. */
3233 pc = get_frame_pc (this_frame);
3234 func = get_frame_func (this_frame);
3235 if (func == 0)
3236 func = pc;
3237
3238 arm_gdbarch_tdep *tdep
3239 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3240 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3241 }
3242
3243 /* Implementation of function hook 'prev_register' in
3244 'struct frame_uwnind' for epilogue unwinder. */
3245
3246 static struct value *
3247 arm_epilogue_frame_prev_register (frame_info_ptr this_frame,
3248 void **this_cache, int regnum)
3249 {
3250 if (*this_cache == NULL)
3251 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3252
3253 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3254 }
3255
3256 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3257 CORE_ADDR pc);
3258 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3259 CORE_ADDR pc);
3260
3261 /* Implementation of function hook 'sniffer' in
3262 'struct frame_uwnind' for epilogue unwinder. */
3263
3264 static int
3265 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3266 frame_info_ptr this_frame,
3267 void **this_prologue_cache)
3268 {
3269 if (frame_relative_level (this_frame) == 0)
3270 {
3271 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3272 CORE_ADDR pc = get_frame_pc (this_frame);
3273
3274 if (arm_frame_is_thumb (this_frame))
3275 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3276 else
3277 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3278 }
3279 else
3280 return 0;
3281 }
3282
3283 /* Frame unwinder from epilogue. */
3284
3285 static const struct frame_unwind arm_epilogue_frame_unwind =
3286 {
3287 "arm epilogue",
3288 NORMAL_FRAME,
3289 default_frame_unwind_stop_reason,
3290 arm_epilogue_frame_this_id,
3291 arm_epilogue_frame_prev_register,
3292 NULL,
3293 arm_epilogue_frame_sniffer,
3294 };
3295
3296 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3297 trampoline, return the target PC. Otherwise return 0.
3298
3299 void call0a (char c, short s, int i, long l) {}
3300
3301 int main (void)
3302 {
3303 (*pointer_to_call0a) (c, s, i, l);
3304 }
3305
3306 Instead of calling a stub library function _call_via_xx (xx is
3307 the register name), GCC may inline the trampoline in the object
3308 file as below (register r2 has the address of call0a).
3309
3310 .global main
3311 .type main, %function
3312 ...
3313 bl .L1
3314 ...
3315 .size main, .-main
3316
3317 .L1:
3318 bx r2
3319
3320 The trampoline 'bx r2' doesn't belong to main. */
3321
3322 static CORE_ADDR
3323 arm_skip_bx_reg (frame_info_ptr frame, CORE_ADDR pc)
3324 {
3325 /* The heuristics of recognizing such trampoline is that FRAME is
3326 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
3327 if (arm_frame_is_thumb (frame))
3328 {
3329 gdb_byte buf[2];
3330
3331 if (target_read_memory (pc, buf, 2) == 0)
3332 {
3333 struct gdbarch *gdbarch = get_frame_arch (frame);
3334 enum bfd_endian byte_order_for_code
3335 = gdbarch_byte_order_for_code (gdbarch);
3336 uint16_t insn
3337 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3338
3339 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3340 {
3341 CORE_ADDR dest
3342 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3343
3344 /* Clear the LSB so that gdb core sets step-resume
3345 breakpoint at the right address. */
3346 return UNMAKE_THUMB_ADDR (dest);
3347 }
3348 }
3349 }
3350
3351 return 0;
3352 }
3353
3354 static struct arm_prologue_cache *
3355 arm_make_stub_cache (frame_info_ptr this_frame)
3356 {
3357 struct arm_prologue_cache *cache;
3358
3359 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3360 arm_cache_init (cache, this_frame);
3361
3362 arm_gdbarch_tdep *tdep
3363 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3364 arm_cache_set_active_sp_value (cache, tdep,
3365 get_frame_register_unsigned (this_frame,
3366 ARM_SP_REGNUM));
3367
3368 return cache;
3369 }
3370
3371 /* Our frame ID for a stub frame is the current SP and LR. */
3372
3373 static void
3374 arm_stub_this_id (frame_info_ptr this_frame,
3375 void **this_cache,
3376 struct frame_id *this_id)
3377 {
3378 struct arm_prologue_cache *cache;
3379
3380 if (*this_cache == NULL)
3381 *this_cache = arm_make_stub_cache (this_frame);
3382 cache = (struct arm_prologue_cache *) *this_cache;
3383
3384 arm_gdbarch_tdep *tdep
3385 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3386 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3387 get_frame_pc (this_frame));
3388 }
3389
3390 static int
3391 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3392 frame_info_ptr this_frame,
3393 void **this_prologue_cache)
3394 {
3395 CORE_ADDR addr_in_block;
3396 gdb_byte dummy[4];
3397 CORE_ADDR pc, start_addr;
3398 const char *name;
3399
3400 addr_in_block = get_frame_address_in_block (this_frame);
3401 pc = get_frame_pc (this_frame);
3402 if (in_plt_section (addr_in_block)
3403 /* We also use the stub winder if the target memory is unreadable
3404 to avoid having the prologue unwinder trying to read it. */
3405 || target_read_memory (pc, dummy, 4) != 0)
3406 return 1;
3407
3408 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3409 && arm_skip_bx_reg (this_frame, pc) != 0)
3410 return 1;
3411
3412 return 0;
3413 }
3414
3415 struct frame_unwind arm_stub_unwind = {
3416 "arm stub",
3417 NORMAL_FRAME,
3418 default_frame_unwind_stop_reason,
3419 arm_stub_this_id,
3420 arm_prologue_prev_register,
3421 NULL,
3422 arm_stub_unwind_sniffer
3423 };
3424
3425 /* Put here the code to store, into CACHE->saved_regs, the addresses
3426 of the saved registers of frame described by THIS_FRAME. CACHE is
3427 returned. */
3428
3429 static struct arm_prologue_cache *
3430 arm_m_exception_cache (frame_info_ptr this_frame)
3431 {
3432 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3433 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3434 struct arm_prologue_cache *cache;
3435
3436 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3437 arm_cache_init (cache, this_frame);
3438
3439 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3440 describes which bits in LR that define which stack was used prior
3441 to the exception and if FPU is used (causing extended stack frame). */
3442
3443 /* In the lockup state PC contains a lockup magic value.
3444 The PC value of the the next outer frame is irreversibly
3445 lost. The other registers are intact so LR likely contains
3446 PC of some frame next to the outer one, but we cannot analyze
3447 the next outer frame without knowing its PC
3448 therefore we do not know SP fixup for this frame.
3449 Some heuristics to resynchronize SP might be possible.
3450 For simplicity, just terminate the unwinding to prevent it going
3451 astray and attempting to read data/addresses it shouldn't,
3452 which may cause further issues due to side-effects. */
3453 CORE_ADDR pc = get_frame_pc (this_frame);
3454 if (arm_m_addr_is_lockup (pc))
3455 {
3456 /* The lockup can be real just in the innermost frame
3457 as the CPU is stopped and cannot create more frames.
3458 If we hit lockup magic PC in the other frame, it is
3459 just a sentinel at the top of stack: do not warn then. */
3460 if (frame_relative_level (this_frame) == 0)
3461 warning (_("ARM M in lockup state, stack unwinding terminated."));
3462
3463 /* Terminate any further stack unwinding. */
3464 arm_cache_set_active_sp_value (cache, tdep, 0);
3465 return cache;
3466 }
3467
3468 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3469
3470 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3471 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3472 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3473 reset if Main Extension is implemented, otherwise the value is unknown. */
3474 if (lr == 0xffffffff)
3475 {
3476 /* Terminate any further stack unwinding. */
3477 arm_cache_set_active_sp_value (cache, tdep, 0);
3478 return cache;
3479 }
3480
3481 /* Check FNC_RETURN indicator bits (24-31). */
3482 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3483 if (fnc_return)
3484 {
3485 /* FNC_RETURN is only valid for targets with Security Extension. */
3486 if (!tdep->have_sec_ext)
3487 {
3488 error (_("While unwinding an exception frame, found unexpected Link "
3489 "Register value %s that requires the security extension, "
3490 "but the extension was not found or is disabled. This "
3491 "should not happen and may be caused by corrupt data or a "
3492 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE));
3493 }
3494
3495 if (!arm_unwind_secure_frames)
3496 {
3497 warning (_("Non-secure to secure stack unwinding disabled."));
3498
3499 /* Terminate any further stack unwinding. */
3500 arm_cache_set_active_sp_value (cache, tdep, 0);
3501 return cache;
3502 }
3503
3504 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
3505 if ((xpsr & 0x1ff) != 0)
3506 /* Handler mode: This is the mode that exceptions are handled in. */
3507 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3508 else
3509 /* Thread mode: This is the normal mode that programs run in. */
3510 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3511
3512 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3513
3514 /* Stack layout for a function call from Secure to Non-Secure state
3515 (ARMv8-M section B3.16):
3516
3517 SP Offset
3518
3519 +-------------------+
3520 0x08 | |
3521 +-------------------+ <-- Original SP
3522 0x04 | Partial xPSR |
3523 +-------------------+
3524 0x00 | Return Address |
3525 +===================+ <-- New SP */
3526
3527 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3528 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3529 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3530
3531 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3532
3533 return cache;
3534 }
3535
3536 /* Check EXC_RETURN indicator bits (24-31). */
3537 bool exc_return = (((lr >> 24) & 0xff) == 0xff);
3538 if (exc_return)
3539 {
3540 int sp_regnum;
3541 bool secure_stack_used = false;
3542 bool default_callee_register_stacking = false;
3543 bool exception_domain_is_secure = false;
3544 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3545
3546 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3547 bool process_stack_used = (bit (lr, 2) != 0);
3548
3549 if (tdep->have_sec_ext)
3550 {
3551 secure_stack_used = (bit (lr, 6) != 0);
3552 default_callee_register_stacking = (bit (lr, 5) != 0);
3553 exception_domain_is_secure = (bit (lr, 0) != 0);
3554
3555 /* Unwinding from non-secure to secure can trip security
3556 measures. In order to avoid the debugger being
3557 intrusive, rely on the user to configure the requested
3558 mode. */
3559 if (secure_stack_used && !exception_domain_is_secure
3560 && !arm_unwind_secure_frames)
3561 {
3562 warning (_("Non-secure to secure stack unwinding disabled."));
3563
3564 /* Terminate any further stack unwinding. */
3565 arm_cache_set_active_sp_value (cache, tdep, 0);
3566 return cache;
3567 }
3568
3569 if (process_stack_used)
3570 {
3571 if (secure_stack_used)
3572 /* Secure thread (process) stack used, use PSP_S as SP. */
3573 sp_regnum = tdep->m_profile_psp_s_regnum;
3574 else
3575 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3576 sp_regnum = tdep->m_profile_psp_ns_regnum;
3577 }
3578 else
3579 {
3580 if (secure_stack_used)
3581 /* Secure main stack used, use MSP_S as SP. */
3582 sp_regnum = tdep->m_profile_msp_s_regnum;
3583 else
3584 /* Non-secure main stack used, use MSP_NS as SP. */
3585 sp_regnum = tdep->m_profile_msp_ns_regnum;
3586 }
3587 }
3588 else
3589 {
3590 if (process_stack_used)
3591 /* Thread (process) stack used, use PSP as SP. */
3592 sp_regnum = tdep->m_profile_psp_regnum;
3593 else
3594 /* Main stack used, use MSP as SP. */
3595 sp_regnum = tdep->m_profile_msp_regnum;
3596 }
3597
3598 /* Set the active SP regnum. */
3599 arm_cache_switch_prev_sp (cache, tdep, sp_regnum);
3600
3601 /* Fetch the SP to use for this frame. */
3602 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3603
3604 /* Exception entry context stacking are described in ARMv8-M (section
3605 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference
3606 Manuals.
3607
3608 The following figure shows the structure of the stack frame when
3609 Security and Floating-point extensions are present.
3610
3611 SP Offsets
3612 Without With
3613 Callee Regs Callee Regs
3614 (Secure -> Non-Secure)
3615 +-------------------+
3616 0xA8 | | 0xD0
3617 +===================+ --+ <-- Original SP
3618 0xA4 | S31 | 0xCC |
3619 +-------------------+ |
3620 ... | Additional FP context
3621 +-------------------+ |
3622 0x68 | S16 | 0x90 |
3623 +===================+ --+
3624 0x64 | Reserved | 0x8C |
3625 +-------------------+ |
3626 0x60 | FPSCR | 0x88 |
3627 +-------------------+ |
3628 0x5C | S15 | 0x84 | FP context
3629 +-------------------+ |
3630 ... |
3631 +-------------------+ |
3632 0x20 | S0 | 0x48 |
3633 +===================+ --+
3634 0x1C | xPSR | 0x44 |
3635 +-------------------+ |
3636 0x18 | Return address | 0x40 |
3637 +-------------------+ |
3638 0x14 | LR(R14) | 0x3C |
3639 +-------------------+ |
3640 0x10 | R12 | 0x38 | State context
3641 +-------------------+ |
3642 0x0C | R3 | 0x34 |
3643 +-------------------+ |
3644 ... |
3645 +-------------------+ |
3646 0x00 | R0 | 0x28 |
3647 +===================+ --+
3648 | R11 | 0x24 |
3649 +-------------------+ |
3650 ... |
3651 +-------------------+ | Additional state
3652 | R4 | 0x08 | context when
3653 +-------------------+ | transitioning from
3654 | Reserved | 0x04 | Secure to Non-Secure
3655 +-------------------+ |
3656 | Magic signature | 0x00 |
3657 +===================+ --+ <-- New SP */
3658
3659 uint32_t sp_r0_offset = 0;
3660
3661 /* With the Security extension, the hardware saves R4..R11 too. */
3662 if (tdep->have_sec_ext && secure_stack_used
3663 && (!default_callee_register_stacking || !exception_domain_is_secure))
3664 {
3665 /* Read R4..R11 from the integer callee registers. */
3666 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3667 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3668 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3669 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3670 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3671 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3672 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3673 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3674 sp_r0_offset = 0x28;
3675 }
3676
3677 /* The hardware saves eight 32-bit words, comprising xPSR,
3678 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3679 "B1.5.6 Exception entry behavior" in
3680 "ARMv7-M Architecture Reference Manual". */
3681 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3682 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3683 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3684 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3685 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset
3686 + 0x10);
3687 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset
3688 + 0x14);
3689 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset
3690 + 0x18);
3691 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset
3692 + 0x1C);
3693
3694 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3695 type used. */
3696 bool extended_frame_used = (bit (lr, 4) == 0);
3697 if (extended_frame_used)
3698 {
3699 ULONGEST fpccr;
3700 ULONGEST fpcar;
3701
3702 /* Read FPCCR register. */
3703 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE,
3704 byte_order, &fpccr))
3705 {
3706 warning (_("Could not fetch required FPCCR content. Further "
3707 "unwinding is impossible."));
3708 arm_cache_set_active_sp_value (cache, tdep, 0);
3709 return cache;
3710 }
3711
3712 /* Read FPCAR register. */
3713 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE,
3714 byte_order, &fpcar))
3715 {
3716 warning (_("Could not fetch FPCAR content. Further unwinding of "
3717 "FP register values will be unreliable."));
3718 fpcar = 0;
3719 }
3720
3721 bool fpccr_aspen = bit (fpccr, 31);
3722 bool fpccr_lspen = bit (fpccr, 30);
3723 bool fpccr_ts = bit (fpccr, 26);
3724 bool fpccr_lspact = bit (fpccr, 0);
3725
3726 /* The LSPEN and ASPEN bits indicate if the lazy state preservation
3727 for FP registers is enabled or disabled. The LSPACT bit indicate,
3728 together with FPCAR, if the lazy state preservation feature is
3729 active for the current frame or for another frame.
3730 See "Lazy context save of FP state", in B1.5.7, also ARM AN298,
3731 supported by Cortex-M4F architecture for details. */
3732 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20)
3733 == (fpcar & ~0x7));
3734 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen
3735 && fpccr_lspact
3736 && fpcar_points_to_this_frame));
3737
3738 /* Extended stack frame type used. */
3739 if (read_fp_regs_from_stack)
3740 {
3741 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20;
3742 for (int i = 0; i < 8; i++)
3743 {
3744 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3745 addr += 8;
3746 }
3747 }
3748 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp
3749 + sp_r0_offset + 0x60);
3750
3751 if (tdep->have_sec_ext && !default_callee_register_stacking
3752 && fpccr_ts)
3753 {
3754 /* Handle floating-point callee saved registers. */
3755 if (read_fp_regs_from_stack)
3756 {
3757 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68;
3758 for (int i = 8; i < 16; i++)
3759 {
3760 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3761 addr += 8;
3762 }
3763 }
3764
3765 arm_cache_set_active_sp_value (cache, tdep,
3766 unwound_sp + sp_r0_offset + 0xA8);
3767 }
3768 else
3769 {
3770 /* Offset 0x64 is reserved. */
3771 arm_cache_set_active_sp_value (cache, tdep,
3772 unwound_sp + sp_r0_offset + 0x68);
3773 }
3774 }
3775 else
3776 {
3777 /* Standard stack frame type used. */
3778 arm_cache_set_active_sp_value (cache, tdep,
3779 unwound_sp + sp_r0_offset + 0x20);
3780 }
3781
3782 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3783 aligner between the top of the 32-byte stack frame and the
3784 previous context's stack pointer. */
3785 ULONGEST xpsr;
3786 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM]
3787 .addr (), ARM_INT_REGISTER_SIZE,
3788 byte_order, &xpsr))
3789 {
3790 warning (_("Could not fetch required XPSR content. Further "
3791 "unwinding is impossible."));
3792 arm_cache_set_active_sp_value (cache, tdep, 0);
3793 return cache;
3794 }
3795
3796 if (bit (xpsr, 9) != 0)
3797 {
3798 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4;
3799 arm_cache_set_active_sp_value (cache, tdep, new_sp);
3800 }
3801
3802 return cache;
3803 }
3804
3805 internal_error (_("While unwinding an exception frame, "
3806 "found unexpected Link Register value "
3807 "%s. This should not happen and may "
3808 "be caused by corrupt data or a bug in"
3809 " GDB."),
3810 phex (lr, ARM_INT_REGISTER_SIZE));
3811 }
3812
3813 /* Implementation of the stop_reason hook for arm_m_exception frames. */
3814
3815 static enum unwind_stop_reason
3816 arm_m_exception_frame_unwind_stop_reason (frame_info_ptr this_frame,
3817 void **this_cache)
3818 {
3819 struct arm_prologue_cache *cache;
3820 arm_gdbarch_tdep *tdep
3821 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3822
3823 if (*this_cache == NULL)
3824 *this_cache = arm_m_exception_cache (this_frame);
3825 cache = (struct arm_prologue_cache *) *this_cache;
3826
3827 /* If we've hit a wall, stop. */
3828 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
3829 return UNWIND_OUTERMOST;
3830
3831 return UNWIND_NO_REASON;
3832 }
3833
3834 /* Implementation of function hook 'this_id' in
3835 'struct frame_uwnind'. */
3836
3837 static void
3838 arm_m_exception_this_id (frame_info_ptr this_frame,
3839 void **this_cache,
3840 struct frame_id *this_id)
3841 {
3842 struct arm_prologue_cache *cache;
3843
3844 if (*this_cache == NULL)
3845 *this_cache = arm_m_exception_cache (this_frame);
3846 cache = (struct arm_prologue_cache *) *this_cache;
3847
3848 /* Our frame ID for a stub frame is the current SP and LR. */
3849 arm_gdbarch_tdep *tdep
3850 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3851 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3852 get_frame_pc (this_frame));
3853 }
3854
3855 /* Implementation of function hook 'prev_register' in
3856 'struct frame_uwnind'. */
3857
3858 static struct value *
3859 arm_m_exception_prev_register (frame_info_ptr this_frame,
3860 void **this_cache,
3861 int prev_regnum)
3862 {
3863 struct arm_prologue_cache *cache;
3864 CORE_ADDR sp_value;
3865
3866 if (*this_cache == NULL)
3867 *this_cache = arm_m_exception_cache (this_frame);
3868 cache = (struct arm_prologue_cache *) *this_cache;
3869
3870 /* The value was already reconstructed into PREV_SP. */
3871 arm_gdbarch_tdep *tdep
3872 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3873 if (prev_regnum == ARM_SP_REGNUM)
3874 return frame_unwind_got_constant (this_frame, prev_regnum,
3875 arm_cache_get_prev_sp_value (cache, tdep));
3876
3877 /* If we are asked to unwind the PC, strip the saved T bit. */
3878 if (prev_regnum == ARM_PC_REGNUM)
3879 {
3880 struct value *value = trad_frame_get_prev_register (this_frame,
3881 cache->saved_regs,
3882 prev_regnum);
3883 CORE_ADDR pc = value_as_address (value);
3884 return frame_unwind_got_constant (this_frame, prev_regnum,
3885 UNMAKE_THUMB_ADDR (pc));
3886 }
3887
3888 /* The value might be one of the alternative SP, if so, use the
3889 value already constructed. */
3890 if (arm_is_alternative_sp_register (tdep, prev_regnum))
3891 {
3892 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3893 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3894 }
3895
3896 /* If we are asked to unwind the xPSR, set T bit if PC is in thumb mode.
3897 LR register is unreliable as it contains FNC_RETURN or EXC_RETURN
3898 pattern. */
3899 if (prev_regnum == ARM_PS_REGNUM)
3900 {
3901 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3902 struct value *value = trad_frame_get_prev_register (this_frame,
3903 cache->saved_regs,
3904 ARM_PC_REGNUM);
3905 CORE_ADDR pc = value_as_address (value);
3906 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3907 ARM_PS_REGNUM);
3908 ULONGEST xpsr = value_as_long (value);
3909
3910 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3911 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3912 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3913 }
3914
3915 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3916 prev_regnum);
3917 }
3918
3919 /* Implementation of function hook 'sniffer' in
3920 'struct frame_uwnind'. */
3921
3922 static int
3923 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3924 frame_info_ptr this_frame,
3925 void **this_prologue_cache)
3926 {
3927 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3928 CORE_ADDR this_pc = get_frame_pc (this_frame);
3929
3930 /* No need to check is_m; this sniffer is only registered for
3931 M-profile architectures. */
3932
3933 /* Check if exception frame returns to a magic PC value. */
3934 return arm_m_addr_is_magic (gdbarch, this_pc);
3935 }
3936
3937 /* Frame unwinder for M-profile exceptions (EXC_RETURN on stack),
3938 lockup and secure/nonsecure interstate function calls (FNC_RETURN). */
3939
3940 struct frame_unwind arm_m_exception_unwind =
3941 {
3942 "arm m exception lockup sec_fnc",
3943 SIGTRAMP_FRAME,
3944 arm_m_exception_frame_unwind_stop_reason,
3945 arm_m_exception_this_id,
3946 arm_m_exception_prev_register,
3947 NULL,
3948 arm_m_exception_unwind_sniffer
3949 };
3950
3951 static CORE_ADDR
3952 arm_normal_frame_base (frame_info_ptr this_frame, void **this_cache)
3953 {
3954 struct arm_prologue_cache *cache;
3955
3956 if (*this_cache == NULL)
3957 *this_cache = arm_make_prologue_cache (this_frame);
3958 cache = (struct arm_prologue_cache *) *this_cache;
3959
3960 arm_gdbarch_tdep *tdep
3961 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3962 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3963 }
3964
3965 struct frame_base arm_normal_base = {
3966 &arm_prologue_unwind,
3967 arm_normal_frame_base,
3968 arm_normal_frame_base,
3969 arm_normal_frame_base
3970 };
3971
3972 struct arm_dwarf2_prev_register_cache
3973 {
3974 /* Cached value of the coresponding stack pointer for the inner frame. */
3975 CORE_ADDR sp;
3976 CORE_ADDR msp;
3977 CORE_ADDR msp_s;
3978 CORE_ADDR msp_ns;
3979 CORE_ADDR psp;
3980 CORE_ADDR psp_s;
3981 CORE_ADDR psp_ns;
3982 };
3983
3984 static struct value *
3985 arm_dwarf2_prev_register (frame_info_ptr this_frame, void **this_cache,
3986 int regnum)
3987 {
3988 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3989 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3990 CORE_ADDR lr;
3991 ULONGEST cpsr;
3992 arm_dwarf2_prev_register_cache *cache
3993 = ((arm_dwarf2_prev_register_cache *)
3994 dwarf2_frame_get_fn_data (this_frame, this_cache,
3995 arm_dwarf2_prev_register));
3996
3997 if (!cache)
3998 {
3999 const unsigned int size = sizeof (struct arm_dwarf2_prev_register_cache);
4000 cache = ((arm_dwarf2_prev_register_cache *)
4001 dwarf2_frame_allocate_fn_data (this_frame, this_cache,
4002 arm_dwarf2_prev_register, size));
4003
4004 if (tdep->have_sec_ext)
4005 {
4006 cache->sp
4007 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4008
4009 cache->msp_s
4010 = get_frame_register_unsigned (this_frame,
4011 tdep->m_profile_msp_s_regnum);
4012 cache->msp_ns
4013 = get_frame_register_unsigned (this_frame,
4014 tdep->m_profile_msp_ns_regnum);
4015 cache->psp_s
4016 = get_frame_register_unsigned (this_frame,
4017 tdep->m_profile_psp_s_regnum);
4018 cache->psp_ns
4019 = get_frame_register_unsigned (this_frame,
4020 tdep->m_profile_psp_ns_regnum);
4021 }
4022 else if (tdep->is_m)
4023 {
4024 cache->sp
4025 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4026
4027 cache->msp
4028 = get_frame_register_unsigned (this_frame,
4029 tdep->m_profile_msp_regnum);
4030 cache->psp
4031 = get_frame_register_unsigned (this_frame,
4032 tdep->m_profile_psp_regnum);
4033 }
4034 }
4035
4036 if (regnum == ARM_PC_REGNUM)
4037 {
4038 /* The PC is normally copied from the return column, which
4039 describes saves of LR. However, that version may have an
4040 extra bit set to indicate Thumb state. The bit is not
4041 part of the PC. */
4042
4043 /* Record in the frame whether the return address was signed. */
4044 if (tdep->have_pacbti)
4045 {
4046 CORE_ADDR ra_auth_code
4047 = frame_unwind_register_unsigned (this_frame,
4048 tdep->pacbti_pseudo_base);
4049
4050 if (ra_auth_code != 0)
4051 set_frame_previous_pc_masked (this_frame);
4052 }
4053
4054 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4055 return frame_unwind_got_constant (this_frame, regnum,
4056 arm_addr_bits_remove (gdbarch, lr));
4057 }
4058 else if (regnum == ARM_PS_REGNUM)
4059 {
4060 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
4061 cpsr = get_frame_register_unsigned (this_frame, regnum);
4062 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4063 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
4064 return frame_unwind_got_constant (this_frame, regnum, cpsr);
4065 }
4066 else if (arm_is_alternative_sp_register (tdep, regnum))
4067 {
4068 /* Handle the alternative SP registers on Cortex-M. */
4069 bool override_with_sp_value = false;
4070 CORE_ADDR val;
4071
4072 if (tdep->have_sec_ext)
4073 {
4074 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4075 && (cache->msp_s == cache->sp || cache->msp_ns == cache->sp);
4076 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum)
4077 && (cache->msp_s == cache->sp);
4078 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum)
4079 && (cache->msp_ns == cache->sp);
4080 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4081 && (cache->psp_s == cache->sp || cache->psp_ns == cache->sp);
4082 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum)
4083 && (cache->psp_s == cache->sp);
4084 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum)
4085 && (cache->psp_ns == cache->sp);
4086
4087 override_with_sp_value = is_msp || is_msp_s || is_msp_ns
4088 || is_psp || is_psp_s || is_psp_ns;
4089
4090 }
4091 else if (tdep->is_m)
4092 {
4093 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4094 && (cache->sp == cache->msp);
4095 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4096 && (cache->sp == cache->psp);
4097
4098 override_with_sp_value = is_msp || is_psp;
4099 }
4100
4101 if (override_with_sp_value)
4102 {
4103 /* Use value of SP from previous frame. */
4104 frame_info_ptr prev_frame = get_prev_frame (this_frame);
4105 if (prev_frame)
4106 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM);
4107 else
4108 val = get_frame_base (this_frame);
4109 }
4110 else
4111 /* Use value for the register from previous frame. */
4112 val = get_frame_register_unsigned (this_frame, regnum);
4113
4114 return frame_unwind_got_constant (this_frame, regnum, val);
4115 }
4116
4117 internal_error (_("Unexpected register %d"), regnum);
4118 }
4119
4120 /* Implement the stack_frame_destroyed_p gdbarch method. */
4121
4122 static int
4123 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4124 {
4125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4126 unsigned int insn, insn2;
4127 int found_return = 0, found_stack_adjust = 0;
4128 CORE_ADDR func_start, func_end;
4129 CORE_ADDR scan_pc;
4130 gdb_byte buf[4];
4131
4132 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4133 return 0;
4134
4135 /* The epilogue is a sequence of instructions along the following lines:
4136
4137 - add stack frame size to SP or FP
4138 - [if frame pointer used] restore SP from FP
4139 - restore registers from SP [may include PC]
4140 - a return-type instruction [if PC wasn't already restored]
4141
4142 In a first pass, we scan forward from the current PC and verify the
4143 instructions we find as compatible with this sequence, ending in a
4144 return instruction.
4145
4146 However, this is not sufficient to distinguish indirect function calls
4147 within a function from indirect tail calls in the epilogue in some cases.
4148 Therefore, if we didn't already find any SP-changing instruction during
4149 forward scan, we add a backward scanning heuristic to ensure we actually
4150 are in the epilogue. */
4151
4152 scan_pc = pc;
4153 while (scan_pc < func_end && !found_return)
4154 {
4155 if (target_read_memory (scan_pc, buf, 2))
4156 break;
4157
4158 scan_pc += 2;
4159 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4160
4161 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
4162 found_return = 1;
4163 else if (insn == 0x46f7) /* mov pc, lr */
4164 found_return = 1;
4165 else if (thumb_instruction_restores_sp (insn))
4166 {
4167 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4168 found_return = 1;
4169 }
4170 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4171 {
4172 if (target_read_memory (scan_pc, buf, 2))
4173 break;
4174
4175 scan_pc += 2;
4176 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
4177
4178 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4179 {
4180 if (insn2 & 0x8000) /* <registers> include PC. */
4181 found_return = 1;
4182 }
4183 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4184 && (insn2 & 0x0fff) == 0x0b04)
4185 {
4186 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
4187 found_return = 1;
4188 }
4189 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4190 && (insn2 & 0x0e00) == 0x0a00)
4191 ;
4192 else
4193 break;
4194 }
4195 else
4196 break;
4197 }
4198
4199 if (!found_return)
4200 return 0;
4201
4202 /* Since any instruction in the epilogue sequence, with the possible
4203 exception of return itself, updates the stack pointer, we need to
4204 scan backwards for at most one instruction. Try either a 16-bit or
4205 a 32-bit instruction. This is just a heuristic, so we do not worry
4206 too much about false positives. */
4207
4208 if (pc - 4 < func_start)
4209 return 0;
4210 if (target_read_memory (pc - 4, buf, 4))
4211 return 0;
4212
4213 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4214 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
4215
4216 if (thumb_instruction_restores_sp (insn2))
4217 found_stack_adjust = 1;
4218 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4219 found_stack_adjust = 1;
4220 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4221 && (insn2 & 0x0fff) == 0x0b04)
4222 found_stack_adjust = 1;
4223 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4224 && (insn2 & 0x0e00) == 0x0a00)
4225 found_stack_adjust = 1;
4226
4227 return found_stack_adjust;
4228 }
4229
4230 static int
4231 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4232 {
4233 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4234 unsigned int insn;
4235 int found_return;
4236 CORE_ADDR func_start, func_end;
4237
4238 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4239 return 0;
4240
4241 /* We are in the epilogue if the previous instruction was a stack
4242 adjustment and the next instruction is a possible return (bx, mov
4243 pc, or pop). We could have to scan backwards to find the stack
4244 adjustment, or forwards to find the return, but this is a decent
4245 approximation. First scan forwards. */
4246
4247 found_return = 0;
4248 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4249 if (bits (insn, 28, 31) != INST_NV)
4250 {
4251 if ((insn & 0x0ffffff0) == 0x012fff10)
4252 /* BX. */
4253 found_return = 1;
4254 else if ((insn & 0x0ffffff0) == 0x01a0f000)
4255 /* MOV PC. */
4256 found_return = 1;
4257 else if ((insn & 0x0fff0000) == 0x08bd0000
4258 && (insn & 0x0000c000) != 0)
4259 /* POP (LDMIA), including PC or LR. */
4260 found_return = 1;
4261 }
4262
4263 if (!found_return)
4264 return 0;
4265
4266 /* Scan backwards. This is just a heuristic, so do not worry about
4267 false positives from mode changes. */
4268
4269 if (pc < func_start + 4)
4270 return 0;
4271
4272 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
4273 if (arm_instruction_restores_sp (insn))
4274 return 1;
4275
4276 return 0;
4277 }
4278
4279 /* Implement the stack_frame_destroyed_p gdbarch method. */
4280
4281 static int
4282 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4283 {
4284 if (arm_pc_is_thumb (gdbarch, pc))
4285 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4286 else
4287 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
4288 }
4289
4290 /* When arguments must be pushed onto the stack, they go on in reverse
4291 order. The code below implements a FILO (stack) to do this. */
4292
4293 struct arm_stack_item
4294 {
4295 int len;
4296 struct arm_stack_item *prev;
4297 gdb_byte *data;
4298 };
4299
4300 static struct arm_stack_item *
4301 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4302 int len)
4303 {
4304 struct arm_stack_item *si;
4305 si = XNEW (struct arm_stack_item);
4306 si->data = (gdb_byte *) xmalloc (len);
4307 si->len = len;
4308 si->prev = prev;
4309 memcpy (si->data, contents, len);
4310 return si;
4311 }
4312
4313 static struct arm_stack_item *
4314 pop_stack_item (struct arm_stack_item *si)
4315 {
4316 struct arm_stack_item *dead = si;
4317 si = si->prev;
4318 xfree (dead->data);
4319 xfree (dead);
4320 return si;
4321 }
4322
4323 /* Implement the gdbarch type alignment method, overrides the generic
4324 alignment algorithm for anything that is arm specific. */
4325
4326 static ULONGEST
4327 arm_type_align (gdbarch *gdbarch, struct type *t)
4328 {
4329 t = check_typedef (t);
4330 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4331 {
4332 /* Use the natural alignment for vector types (the same for
4333 scalar type), but the maximum alignment is 64-bit. */
4334 if (t->length () > 8)
4335 return 8;
4336 else
4337 return t->length ();
4338 }
4339
4340 /* Allow the common code to calculate the alignment. */
4341 return 0;
4342 }
4343
4344 /* Possible base types for a candidate for passing and returning in
4345 VFP registers. */
4346
4347 enum arm_vfp_cprc_base_type
4348 {
4349 VFP_CPRC_UNKNOWN,
4350 VFP_CPRC_SINGLE,
4351 VFP_CPRC_DOUBLE,
4352 VFP_CPRC_VEC64,
4353 VFP_CPRC_VEC128
4354 };
4355
4356 /* The length of one element of base type B. */
4357
4358 static unsigned
4359 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4360 {
4361 switch (b)
4362 {
4363 case VFP_CPRC_SINGLE:
4364 return 4;
4365 case VFP_CPRC_DOUBLE:
4366 return 8;
4367 case VFP_CPRC_VEC64:
4368 return 8;
4369 case VFP_CPRC_VEC128:
4370 return 16;
4371 default:
4372 internal_error (_("Invalid VFP CPRC type: %d."),
4373 (int) b);
4374 }
4375 }
4376
4377 /* The character ('s', 'd' or 'q') for the type of VFP register used
4378 for passing base type B. */
4379
4380 static int
4381 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4382 {
4383 switch (b)
4384 {
4385 case VFP_CPRC_SINGLE:
4386 return 's';
4387 case VFP_CPRC_DOUBLE:
4388 return 'd';
4389 case VFP_CPRC_VEC64:
4390 return 'd';
4391 case VFP_CPRC_VEC128:
4392 return 'q';
4393 default:
4394 internal_error (_("Invalid VFP CPRC type: %d."),
4395 (int) b);
4396 }
4397 }
4398
4399 /* Determine whether T may be part of a candidate for passing and
4400 returning in VFP registers, ignoring the limit on the total number
4401 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4402 classification of the first valid component found; if it is not
4403 VFP_CPRC_UNKNOWN, all components must have the same classification
4404 as *BASE_TYPE. If it is found that T contains a type not permitted
4405 for passing and returning in VFP registers, a type differently
4406 classified from *BASE_TYPE, or two types differently classified
4407 from each other, return -1, otherwise return the total number of
4408 base-type elements found (possibly 0 in an empty structure or
4409 array). Vector types are not currently supported, matching the
4410 generic AAPCS support. */
4411
4412 static int
4413 arm_vfp_cprc_sub_candidate (struct type *t,
4414 enum arm_vfp_cprc_base_type *base_type)
4415 {
4416 t = check_typedef (t);
4417 switch (t->code ())
4418 {
4419 case TYPE_CODE_FLT:
4420 switch (t->length ())
4421 {
4422 case 4:
4423 if (*base_type == VFP_CPRC_UNKNOWN)
4424 *base_type = VFP_CPRC_SINGLE;
4425 else if (*base_type != VFP_CPRC_SINGLE)
4426 return -1;
4427 return 1;
4428
4429 case 8:
4430 if (*base_type == VFP_CPRC_UNKNOWN)
4431 *base_type = VFP_CPRC_DOUBLE;
4432 else if (*base_type != VFP_CPRC_DOUBLE)
4433 return -1;
4434 return 1;
4435
4436 default:
4437 return -1;
4438 }
4439 break;
4440
4441 case TYPE_CODE_COMPLEX:
4442 /* Arguments of complex T where T is one of the types float or
4443 double get treated as if they are implemented as:
4444
4445 struct complexT
4446 {
4447 T real;
4448 T imag;
4449 };
4450
4451 */
4452 switch (t->length ())
4453 {
4454 case 8:
4455 if (*base_type == VFP_CPRC_UNKNOWN)
4456 *base_type = VFP_CPRC_SINGLE;
4457 else if (*base_type != VFP_CPRC_SINGLE)
4458 return -1;
4459 return 2;
4460
4461 case 16:
4462 if (*base_type == VFP_CPRC_UNKNOWN)
4463 *base_type = VFP_CPRC_DOUBLE;
4464 else if (*base_type != VFP_CPRC_DOUBLE)
4465 return -1;
4466 return 2;
4467
4468 default:
4469 return -1;
4470 }
4471 break;
4472
4473 case TYPE_CODE_ARRAY:
4474 {
4475 if (t->is_vector ())
4476 {
4477 /* A 64-bit or 128-bit containerized vector type are VFP
4478 CPRCs. */
4479 switch (t->length ())
4480 {
4481 case 8:
4482 if (*base_type == VFP_CPRC_UNKNOWN)
4483 *base_type = VFP_CPRC_VEC64;
4484 return 1;
4485 case 16:
4486 if (*base_type == VFP_CPRC_UNKNOWN)
4487 *base_type = VFP_CPRC_VEC128;
4488 return 1;
4489 default:
4490 return -1;
4491 }
4492 }
4493 else
4494 {
4495 int count;
4496 unsigned unitlen;
4497
4498 count = arm_vfp_cprc_sub_candidate (t->target_type (),
4499 base_type);
4500 if (count == -1)
4501 return -1;
4502 if (t->length () == 0)
4503 {
4504 gdb_assert (count == 0);
4505 return 0;
4506 }
4507 else if (count == 0)
4508 return -1;
4509 unitlen = arm_vfp_cprc_unit_length (*base_type);
4510 gdb_assert ((t->length () % unitlen) == 0);
4511 return t->length () / unitlen;
4512 }
4513 }
4514 break;
4515
4516 case TYPE_CODE_STRUCT:
4517 {
4518 int count = 0;
4519 unsigned unitlen;
4520 int i;
4521 for (i = 0; i < t->num_fields (); i++)
4522 {
4523 int sub_count = 0;
4524
4525 if (!t->field (i).is_static ())
4526 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4527 base_type);
4528 if (sub_count == -1)
4529 return -1;
4530 count += sub_count;
4531 }
4532 if (t->length () == 0)
4533 {
4534 gdb_assert (count == 0);
4535 return 0;
4536 }
4537 else if (count == 0)
4538 return -1;
4539 unitlen = arm_vfp_cprc_unit_length (*base_type);
4540 if (t->length () != unitlen * count)
4541 return -1;
4542 return count;
4543 }
4544
4545 case TYPE_CODE_UNION:
4546 {
4547 int count = 0;
4548 unsigned unitlen;
4549 int i;
4550 for (i = 0; i < t->num_fields (); i++)
4551 {
4552 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4553 base_type);
4554 if (sub_count == -1)
4555 return -1;
4556 count = (count > sub_count ? count : sub_count);
4557 }
4558 if (t->length () == 0)
4559 {
4560 gdb_assert (count == 0);
4561 return 0;
4562 }
4563 else if (count == 0)
4564 return -1;
4565 unitlen = arm_vfp_cprc_unit_length (*base_type);
4566 if (t->length () != unitlen * count)
4567 return -1;
4568 return count;
4569 }
4570
4571 default:
4572 break;
4573 }
4574
4575 return -1;
4576 }
4577
4578 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4579 if passed to or returned from a non-variadic function with the VFP
4580 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4581 *BASE_TYPE to the base type for T and *COUNT to the number of
4582 elements of that base type before returning. */
4583
4584 static int
4585 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4586 int *count)
4587 {
4588 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4589 int c = arm_vfp_cprc_sub_candidate (t, &b);
4590 if (c <= 0 || c > 4)
4591 return 0;
4592 *base_type = b;
4593 *count = c;
4594 return 1;
4595 }
4596
4597 /* Return 1 if the VFP ABI should be used for passing arguments to and
4598 returning values from a function of type FUNC_TYPE, 0
4599 otherwise. */
4600
4601 static int
4602 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4603 {
4604 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4605
4606 /* Variadic functions always use the base ABI. Assume that functions
4607 without debug info are not variadic. */
4608 if (func_type && check_typedef (func_type)->has_varargs ())
4609 return 0;
4610
4611 /* The VFP ABI is only supported as a variant of AAPCS. */
4612 if (tdep->arm_abi != ARM_ABI_AAPCS)
4613 return 0;
4614
4615 return tdep->fp_model == ARM_FLOAT_VFP;
4616 }
4617
4618 /* We currently only support passing parameters in integer registers, which
4619 conforms with GCC's default model, and VFP argument passing following
4620 the VFP variant of AAPCS. Several other variants exist and
4621 we should probably support some of them based on the selected ABI. */
4622
4623 static CORE_ADDR
4624 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4625 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4626 struct value **args, CORE_ADDR sp,
4627 function_call_return_method return_method,
4628 CORE_ADDR struct_addr)
4629 {
4630 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4631 int argnum;
4632 int argreg;
4633 int nstack;
4634 struct arm_stack_item *si = NULL;
4635 int use_vfp_abi;
4636 struct type *ftype;
4637 unsigned vfp_regs_free = (1 << 16) - 1;
4638 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4639
4640 /* Determine the type of this function and whether the VFP ABI
4641 applies. */
4642 ftype = check_typedef (function->type ());
4643 if (ftype->code () == TYPE_CODE_PTR)
4644 ftype = check_typedef (ftype->target_type ());
4645 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4646
4647 /* Set the return address. For the ARM, the return breakpoint is
4648 always at BP_ADDR. */
4649 if (arm_pc_is_thumb (gdbarch, bp_addr))
4650 bp_addr |= 1;
4651 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4652
4653 /* Walk through the list of args and determine how large a temporary
4654 stack is required. Need to take care here as structs may be
4655 passed on the stack, and we have to push them. */
4656 nstack = 0;
4657
4658 argreg = ARM_A1_REGNUM;
4659 nstack = 0;
4660
4661 /* The struct_return pointer occupies the first parameter
4662 passing register. */
4663 if (return_method == return_method_struct)
4664 {
4665 arm_debug_printf ("struct return in %s = %s",
4666 gdbarch_register_name (gdbarch, argreg),
4667 paddress (gdbarch, struct_addr));
4668
4669 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4670 argreg++;
4671 }
4672
4673 for (argnum = 0; argnum < nargs; argnum++)
4674 {
4675 int len;
4676 struct type *arg_type;
4677 struct type *target_type;
4678 enum type_code typecode;
4679 const bfd_byte *val;
4680 int align;
4681 enum arm_vfp_cprc_base_type vfp_base_type;
4682 int vfp_base_count;
4683 int may_use_core_reg = 1;
4684
4685 arg_type = check_typedef (args[argnum]->type ());
4686 len = arg_type->length ();
4687 target_type = arg_type->target_type ();
4688 typecode = arg_type->code ();
4689 val = args[argnum]->contents ().data ();
4690
4691 align = type_align (arg_type);
4692 /* Round alignment up to a whole number of words. */
4693 align = (align + ARM_INT_REGISTER_SIZE - 1)
4694 & ~(ARM_INT_REGISTER_SIZE - 1);
4695 /* Different ABIs have different maximum alignments. */
4696 if (tdep->arm_abi == ARM_ABI_APCS)
4697 {
4698 /* The APCS ABI only requires word alignment. */
4699 align = ARM_INT_REGISTER_SIZE;
4700 }
4701 else
4702 {
4703 /* The AAPCS requires at most doubleword alignment. */
4704 if (align > ARM_INT_REGISTER_SIZE * 2)
4705 align = ARM_INT_REGISTER_SIZE * 2;
4706 }
4707
4708 if (use_vfp_abi
4709 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4710 &vfp_base_count))
4711 {
4712 int regno;
4713 int unit_length;
4714 int shift;
4715 unsigned mask;
4716
4717 /* Because this is a CPRC it cannot go in a core register or
4718 cause a core register to be skipped for alignment.
4719 Either it goes in VFP registers and the rest of this loop
4720 iteration is skipped for this argument, or it goes on the
4721 stack (and the stack alignment code is correct for this
4722 case). */
4723 may_use_core_reg = 0;
4724
4725 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4726 shift = unit_length / 4;
4727 mask = (1 << (shift * vfp_base_count)) - 1;
4728 for (regno = 0; regno < 16; regno += shift)
4729 if (((vfp_regs_free >> regno) & mask) == mask)
4730 break;
4731
4732 if (regno < 16)
4733 {
4734 int reg_char;
4735 int reg_scaled;
4736 int i;
4737
4738 vfp_regs_free &= ~(mask << regno);
4739 reg_scaled = regno / shift;
4740 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4741 for (i = 0; i < vfp_base_count; i++)
4742 {
4743 char name_buf[4];
4744 int regnum;
4745 if (reg_char == 'q')
4746 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4747 val + i * unit_length);
4748 else
4749 {
4750 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4751 reg_char, reg_scaled + i);
4752 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4753 strlen (name_buf));
4754 regcache->cooked_write (regnum, val + i * unit_length);
4755 }
4756 }
4757 continue;
4758 }
4759 else
4760 {
4761 /* This CPRC could not go in VFP registers, so all VFP
4762 registers are now marked as used. */
4763 vfp_regs_free = 0;
4764 }
4765 }
4766
4767 /* Push stack padding for doubleword alignment. */
4768 if (nstack & (align - 1))
4769 {
4770 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4771 nstack += ARM_INT_REGISTER_SIZE;
4772 }
4773
4774 /* Doubleword aligned quantities must go in even register pairs. */
4775 if (may_use_core_reg
4776 && argreg <= ARM_LAST_ARG_REGNUM
4777 && align > ARM_INT_REGISTER_SIZE
4778 && argreg & 1)
4779 argreg++;
4780
4781 /* If the argument is a pointer to a function, and it is a
4782 Thumb function, create a LOCAL copy of the value and set
4783 the THUMB bit in it. */
4784 if (TYPE_CODE_PTR == typecode
4785 && target_type != NULL
4786 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4787 {
4788 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4789 if (arm_pc_is_thumb (gdbarch, regval))
4790 {
4791 bfd_byte *copy = (bfd_byte *) alloca (len);
4792 store_unsigned_integer (copy, len, byte_order,
4793 MAKE_THUMB_ADDR (regval));
4794 val = copy;
4795 }
4796 }
4797
4798 /* Copy the argument to general registers or the stack in
4799 register-sized pieces. Large arguments are split between
4800 registers and stack. */
4801 while (len > 0)
4802 {
4803 int partial_len = len < ARM_INT_REGISTER_SIZE
4804 ? len : ARM_INT_REGISTER_SIZE;
4805 CORE_ADDR regval
4806 = extract_unsigned_integer (val, partial_len, byte_order);
4807
4808 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4809 {
4810 /* The argument is being passed in a general purpose
4811 register. */
4812 if (byte_order == BFD_ENDIAN_BIG)
4813 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4814
4815 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4816 gdbarch_register_name (gdbarch, argreg),
4817 phex (regval, ARM_INT_REGISTER_SIZE));
4818
4819 regcache_cooked_write_unsigned (regcache, argreg, regval);
4820 argreg++;
4821 }
4822 else
4823 {
4824 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4825
4826 memset (buf, 0, sizeof (buf));
4827 store_unsigned_integer (buf, partial_len, byte_order, regval);
4828
4829 /* Push the arguments onto the stack. */
4830 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4831 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4832 nstack += ARM_INT_REGISTER_SIZE;
4833 }
4834
4835 len -= partial_len;
4836 val += partial_len;
4837 }
4838 }
4839 /* If we have an odd number of words to push, then decrement the stack
4840 by one word now, so first stack argument will be dword aligned. */
4841 if (nstack & 4)
4842 sp -= 4;
4843
4844 while (si)
4845 {
4846 sp -= si->len;
4847 write_memory (sp, si->data, si->len);
4848 si = pop_stack_item (si);
4849 }
4850
4851 /* Finally, update teh SP register. */
4852 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4853
4854 return sp;
4855 }
4856
4857
4858 /* Always align the frame to an 8-byte boundary. This is required on
4859 some platforms and harmless on the rest. */
4860
4861 static CORE_ADDR
4862 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4863 {
4864 /* Align the stack to eight bytes. */
4865 return sp & ~ (CORE_ADDR) 7;
4866 }
4867
4868 static void
4869 print_fpu_flags (struct ui_file *file, int flags)
4870 {
4871 if (flags & (1 << 0))
4872 gdb_puts ("IVO ", file);
4873 if (flags & (1 << 1))
4874 gdb_puts ("DVZ ", file);
4875 if (flags & (1 << 2))
4876 gdb_puts ("OFL ", file);
4877 if (flags & (1 << 3))
4878 gdb_puts ("UFL ", file);
4879 if (flags & (1 << 4))
4880 gdb_puts ("INX ", file);
4881 gdb_putc ('\n', file);
4882 }
4883
4884 /* Print interesting information about the floating point processor
4885 (if present) or emulator. */
4886 static void
4887 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4888 frame_info_ptr frame, const char *args)
4889 {
4890 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4891 int type;
4892
4893 type = (status >> 24) & 127;
4894 if (status & (1 << 31))
4895 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4896 else
4897 gdb_printf (file, _("Software FPU type %d\n"), type);
4898 /* i18n: [floating point unit] mask */
4899 gdb_puts (_("mask: "), file);
4900 print_fpu_flags (file, status >> 16);
4901 /* i18n: [floating point unit] flags */
4902 gdb_puts (_("flags: "), file);
4903 print_fpu_flags (file, status);
4904 }
4905
4906 /* Construct the ARM extended floating point type. */
4907 static struct type *
4908 arm_ext_type (struct gdbarch *gdbarch)
4909 {
4910 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4911
4912 if (!tdep->arm_ext_type)
4913 {
4914 type_allocator alloc (gdbarch);
4915 tdep->arm_ext_type
4916 = init_float_type (alloc, -1, "builtin_type_arm_ext",
4917 floatformats_arm_ext);
4918 }
4919
4920 return tdep->arm_ext_type;
4921 }
4922
4923 static struct type *
4924 arm_neon_double_type (struct gdbarch *gdbarch)
4925 {
4926 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4927
4928 if (tdep->neon_double_type == NULL)
4929 {
4930 struct type *t, *elem;
4931
4932 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4933 TYPE_CODE_UNION);
4934 elem = builtin_type (gdbarch)->builtin_uint8;
4935 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4936 elem = builtin_type (gdbarch)->builtin_uint16;
4937 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4938 elem = builtin_type (gdbarch)->builtin_uint32;
4939 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4940 elem = builtin_type (gdbarch)->builtin_uint64;
4941 append_composite_type_field (t, "u64", elem);
4942 elem = builtin_type (gdbarch)->builtin_float;
4943 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4944 elem = builtin_type (gdbarch)->builtin_double;
4945 append_composite_type_field (t, "f64", elem);
4946
4947 t->set_is_vector (true);
4948 t->set_name ("neon_d");
4949 tdep->neon_double_type = t;
4950 }
4951
4952 return tdep->neon_double_type;
4953 }
4954
4955 /* FIXME: The vector types are not correctly ordered on big-endian
4956 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4957 bits of d0 - regardless of what unit size is being held in d0. So
4958 the offset of the first uint8 in d0 is 7, but the offset of the
4959 first float is 4. This code works as-is for little-endian
4960 targets. */
4961
4962 static struct type *
4963 arm_neon_quad_type (struct gdbarch *gdbarch)
4964 {
4965 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4966
4967 if (tdep->neon_quad_type == NULL)
4968 {
4969 struct type *t, *elem;
4970
4971 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4972 TYPE_CODE_UNION);
4973 elem = builtin_type (gdbarch)->builtin_uint8;
4974 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4975 elem = builtin_type (gdbarch)->builtin_uint16;
4976 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4977 elem = builtin_type (gdbarch)->builtin_uint32;
4978 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4979 elem = builtin_type (gdbarch)->builtin_uint64;
4980 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4981 elem = builtin_type (gdbarch)->builtin_float;
4982 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4983 elem = builtin_type (gdbarch)->builtin_double;
4984 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4985
4986 t->set_is_vector (true);
4987 t->set_name ("neon_q");
4988 tdep->neon_quad_type = t;
4989 }
4990
4991 return tdep->neon_quad_type;
4992 }
4993
4994 /* Return true if REGNUM is a Q pseudo register. Return false
4995 otherwise.
4996
4997 REGNUM is the raw register number and not a pseudo-relative register
4998 number. */
4999
5000 static bool
5001 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
5002 {
5003 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5004
5005 /* Q pseudo registers are available for both NEON (Q0~Q15) and
5006 MVE (Q0~Q7) features. */
5007 if (tdep->have_q_pseudos
5008 && regnum >= tdep->q_pseudo_base
5009 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
5010 return true;
5011
5012 return false;
5013 }
5014
5015 /* Return true if REGNUM is a VFP S pseudo register. Return false
5016 otherwise.
5017
5018 REGNUM is the raw register number and not a pseudo-relative register
5019 number. */
5020
5021 static bool
5022 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
5023 {
5024 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5025
5026 if (tdep->have_s_pseudos
5027 && regnum >= tdep->s_pseudo_base
5028 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
5029 return true;
5030
5031 return false;
5032 }
5033
5034 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
5035 otherwise.
5036
5037 REGNUM is the raw register number and not a pseudo-relative register
5038 number. */
5039
5040 static bool
5041 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
5042 {
5043 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5044
5045 if (tdep->have_mve
5046 && regnum >= tdep->mve_pseudo_base
5047 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
5048 return true;
5049
5050 return false;
5051 }
5052
5053 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
5054 false otherwise.
5055
5056 REGNUM is the raw register number and not a pseudo-relative register
5057 number. */
5058
5059 static bool
5060 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
5061 {
5062 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5063
5064 if (tdep->have_pacbti
5065 && regnum >= tdep->pacbti_pseudo_base
5066 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
5067 return true;
5068
5069 return false;
5070 }
5071
5072 /* Return the GDB type object for the "standard" data type of data in
5073 register N. */
5074
5075 static struct type *
5076 arm_register_type (struct gdbarch *gdbarch, int regnum)
5077 {
5078 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5079
5080 if (is_s_pseudo (gdbarch, regnum))
5081 return builtin_type (gdbarch)->builtin_float;
5082
5083 if (is_q_pseudo (gdbarch, regnum))
5084 return arm_neon_quad_type (gdbarch);
5085
5086 if (is_mve_pseudo (gdbarch, regnum))
5087 return builtin_type (gdbarch)->builtin_int16;
5088
5089 if (is_pacbti_pseudo (gdbarch, regnum))
5090 return builtin_type (gdbarch)->builtin_uint32;
5091
5092 /* If the target description has register information, we are only
5093 in this function so that we can override the types of
5094 double-precision registers for NEON. */
5095 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
5096 {
5097 struct type *t = tdesc_register_type (gdbarch, regnum);
5098
5099 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
5100 && t->code () == TYPE_CODE_FLT
5101 && tdep->have_neon)
5102 return arm_neon_double_type (gdbarch);
5103 else
5104 return t;
5105 }
5106
5107 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
5108 {
5109 if (!tdep->have_fpa_registers)
5110 return builtin_type (gdbarch)->builtin_void;
5111
5112 return arm_ext_type (gdbarch);
5113 }
5114 else if (regnum == ARM_SP_REGNUM)
5115 return builtin_type (gdbarch)->builtin_data_ptr;
5116 else if (regnum == ARM_PC_REGNUM)
5117 return builtin_type (gdbarch)->builtin_func_ptr;
5118 else if (regnum >= ARRAY_SIZE (arm_register_names))
5119 /* These registers are only supported on targets which supply
5120 an XML description. */
5121 return builtin_type (gdbarch)->builtin_int0;
5122 else
5123 return builtin_type (gdbarch)->builtin_uint32;
5124 }
5125
5126 /* Map a DWARF register REGNUM onto the appropriate GDB register
5127 number. */
5128
5129 static int
5130 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
5131 {
5132 /* Core integer regs. */
5133 if (reg >= 0 && reg <= 15)
5134 return reg;
5135
5136 /* Legacy FPA encoding. These were once used in a way which
5137 overlapped with VFP register numbering, so their use is
5138 discouraged, but GDB doesn't support the ARM toolchain
5139 which used them for VFP. */
5140 if (reg >= 16 && reg <= 23)
5141 return ARM_F0_REGNUM + reg - 16;
5142
5143 /* New assignments for the FPA registers. */
5144 if (reg >= 96 && reg <= 103)
5145 return ARM_F0_REGNUM + reg - 96;
5146
5147 /* WMMX register assignments. */
5148 if (reg >= 104 && reg <= 111)
5149 return ARM_WCGR0_REGNUM + reg - 104;
5150
5151 if (reg >= 112 && reg <= 127)
5152 return ARM_WR0_REGNUM + reg - 112;
5153
5154 /* PACBTI register containing the Pointer Authentication Code. */
5155 if (reg == ARM_DWARF_RA_AUTH_CODE)
5156 {
5157 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5158
5159 if (tdep->have_pacbti)
5160 return tdep->pacbti_pseudo_base;
5161
5162 return -1;
5163 }
5164
5165 if (reg >= 192 && reg <= 199)
5166 return ARM_WC0_REGNUM + reg - 192;
5167
5168 /* VFP v2 registers. A double precision value is actually
5169 in d1 rather than s2, but the ABI only defines numbering
5170 for the single precision registers. This will "just work"
5171 in GDB for little endian targets (we'll read eight bytes,
5172 starting in s0 and then progressing to s1), but will be
5173 reversed on big endian targets with VFP. This won't
5174 be a problem for the new Neon quad registers; you're supposed
5175 to use DW_OP_piece for those. */
5176 if (reg >= 64 && reg <= 95)
5177 {
5178 char name_buf[4];
5179
5180 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
5181 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5182 strlen (name_buf));
5183 }
5184
5185 /* VFP v3 / Neon registers. This range is also used for VFP v2
5186 registers, except that it now describes d0 instead of s0. */
5187 if (reg >= 256 && reg <= 287)
5188 {
5189 char name_buf[4];
5190
5191 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
5192 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5193 strlen (name_buf));
5194 }
5195
5196 return -1;
5197 }
5198
5199 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
5200 static int
5201 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
5202 {
5203 int reg = regnum;
5204 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
5205
5206 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
5207 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
5208
5209 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
5210 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
5211
5212 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
5213 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
5214
5215 if (reg < NUM_GREGS)
5216 return SIM_ARM_R0_REGNUM + reg;
5217 reg -= NUM_GREGS;
5218
5219 if (reg < NUM_FREGS)
5220 return SIM_ARM_FP0_REGNUM + reg;
5221 reg -= NUM_FREGS;
5222
5223 if (reg < NUM_SREGS)
5224 return SIM_ARM_FPS_REGNUM + reg;
5225 reg -= NUM_SREGS;
5226
5227 internal_error (_("Bad REGNUM %d"), regnum);
5228 }
5229
5230 static const unsigned char op_lit0 = DW_OP_lit0;
5231
5232 static void
5233 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
5234 struct dwarf2_frame_state_reg *reg,
5235 frame_info_ptr this_frame)
5236 {
5237 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5238
5239 if (is_pacbti_pseudo (gdbarch, regnum))
5240 {
5241 /* Initialize RA_AUTH_CODE to zero. */
5242 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
5243 reg->loc.exp.start = &op_lit0;
5244 reg->loc.exp.len = 1;
5245 return;
5246 }
5247
5248 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM)
5249 {
5250 reg->how = DWARF2_FRAME_REG_FN;
5251 reg->loc.fn = arm_dwarf2_prev_register;
5252 }
5253 else if (regnum == ARM_SP_REGNUM)
5254 reg->how = DWARF2_FRAME_REG_CFA;
5255 else if (arm_is_alternative_sp_register (tdep, regnum))
5256 {
5257 /* Handle the alternative SP registers on Cortex-M. */
5258 reg->how = DWARF2_FRAME_REG_FN;
5259 reg->loc.fn = arm_dwarf2_prev_register;
5260 }
5261 }
5262
5263 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5264 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5265 NULL if an error occurs. BUF is freed. */
5266
5267 static gdb_byte *
5268 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5269 int old_len, int new_len)
5270 {
5271 gdb_byte *new_buf;
5272 int bytes_to_read = new_len - old_len;
5273
5274 new_buf = (gdb_byte *) xmalloc (new_len);
5275 memcpy (new_buf + bytes_to_read, buf, old_len);
5276 xfree (buf);
5277 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
5278 {
5279 xfree (new_buf);
5280 return NULL;
5281 }
5282 return new_buf;
5283 }
5284
5285 /* An IT block is at most the 2-byte IT instruction followed by
5286 four 4-byte instructions. The furthest back we must search to
5287 find an IT block that affects the current instruction is thus
5288 2 + 3 * 4 == 14 bytes. */
5289 #define MAX_IT_BLOCK_PREFIX 14
5290
5291 /* Use a quick scan if there are more than this many bytes of
5292 code. */
5293 #define IT_SCAN_THRESHOLD 32
5294
5295 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5296 A breakpoint in an IT block may not be hit, depending on the
5297 condition flags. */
5298 static CORE_ADDR
5299 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5300 {
5301 gdb_byte *buf;
5302 char map_type;
5303 CORE_ADDR boundary, func_start;
5304 int buf_len;
5305 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5306 int i, any, last_it, last_it_count;
5307 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5308
5309 /* If we are using BKPT breakpoints, none of this is necessary. */
5310 if (tdep->thumb2_breakpoint == NULL)
5311 return bpaddr;
5312
5313 /* ARM mode does not have this problem. */
5314 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5315 return bpaddr;
5316
5317 /* We are setting a breakpoint in Thumb code that could potentially
5318 contain an IT block. The first step is to find how much Thumb
5319 code there is; we do not need to read outside of known Thumb
5320 sequences. */
5321 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5322 if (map_type == 0)
5323 /* Thumb-2 code must have mapping symbols to have a chance. */
5324 return bpaddr;
5325
5326 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5327
5328 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5329 && func_start > boundary)
5330 boundary = func_start;
5331
5332 /* Search for a candidate IT instruction. We have to do some fancy
5333 footwork to distinguish a real IT instruction from the second
5334 half of a 32-bit instruction, but there is no need for that if
5335 there's no candidate. */
5336 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5337 if (buf_len == 0)
5338 /* No room for an IT instruction. */
5339 return bpaddr;
5340
5341 buf = (gdb_byte *) xmalloc (buf_len);
5342 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5343 return bpaddr;
5344 any = 0;
5345 for (i = 0; i < buf_len; i += 2)
5346 {
5347 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5348 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5349 {
5350 any = 1;
5351 break;
5352 }
5353 }
5354
5355 if (any == 0)
5356 {
5357 xfree (buf);
5358 return bpaddr;
5359 }
5360
5361 /* OK, the code bytes before this instruction contain at least one
5362 halfword which resembles an IT instruction. We know that it's
5363 Thumb code, but there are still two possibilities. Either the
5364 halfword really is an IT instruction, or it is the second half of
5365 a 32-bit Thumb instruction. The only way we can tell is to
5366 scan forwards from a known instruction boundary. */
5367 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5368 {
5369 int definite;
5370
5371 /* There's a lot of code before this instruction. Start with an
5372 optimistic search; it's easy to recognize halfwords that can
5373 not be the start of a 32-bit instruction, and use that to
5374 lock on to the instruction boundaries. */
5375 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5376 if (buf == NULL)
5377 return bpaddr;
5378 buf_len = IT_SCAN_THRESHOLD;
5379
5380 definite = 0;
5381 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5382 {
5383 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5384 if (thumb_insn_size (inst1) == 2)
5385 {
5386 definite = 1;
5387 break;
5388 }
5389 }
5390
5391 /* At this point, if DEFINITE, BUF[I] is the first place we
5392 are sure that we know the instruction boundaries, and it is far
5393 enough from BPADDR that we could not miss an IT instruction
5394 affecting BPADDR. If ! DEFINITE, give up - start from a
5395 known boundary. */
5396 if (! definite)
5397 {
5398 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5399 bpaddr - boundary);
5400 if (buf == NULL)
5401 return bpaddr;
5402 buf_len = bpaddr - boundary;
5403 i = 0;
5404 }
5405 }
5406 else
5407 {
5408 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5409 if (buf == NULL)
5410 return bpaddr;
5411 buf_len = bpaddr - boundary;
5412 i = 0;
5413 }
5414
5415 /* Scan forwards. Find the last IT instruction before BPADDR. */
5416 last_it = -1;
5417 last_it_count = 0;
5418 while (i < buf_len)
5419 {
5420 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5421 last_it_count--;
5422 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5423 {
5424 last_it = i;
5425 if (inst1 & 0x0001)
5426 last_it_count = 4;
5427 else if (inst1 & 0x0002)
5428 last_it_count = 3;
5429 else if (inst1 & 0x0004)
5430 last_it_count = 2;
5431 else
5432 last_it_count = 1;
5433 }
5434 i += thumb_insn_size (inst1);
5435 }
5436
5437 xfree (buf);
5438
5439 if (last_it == -1)
5440 /* There wasn't really an IT instruction after all. */
5441 return bpaddr;
5442
5443 if (last_it_count < 1)
5444 /* It was too far away. */
5445 return bpaddr;
5446
5447 /* This really is a trouble spot. Move the breakpoint to the IT
5448 instruction. */
5449 return bpaddr - buf_len + last_it;
5450 }
5451
5452 /* ARM displaced stepping support.
5453
5454 Generally ARM displaced stepping works as follows:
5455
5456 1. When an instruction is to be single-stepped, it is first decoded by
5457 arm_process_displaced_insn. Depending on the type of instruction, it is
5458 then copied to a scratch location, possibly in a modified form. The
5459 copy_* set of functions performs such modification, as necessary. A
5460 breakpoint is placed after the modified instruction in the scratch space
5461 to return control to GDB. Note in particular that instructions which
5462 modify the PC will no longer do so after modification.
5463
5464 2. The instruction is single-stepped, by setting the PC to the scratch
5465 location address, and resuming. Control returns to GDB when the
5466 breakpoint is hit.
5467
5468 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5469 function used for the current instruction. This function's job is to
5470 put the CPU/memory state back to what it would have been if the
5471 instruction had been executed unmodified in its original location. */
5472
5473 /* NOP instruction (mov r0, r0). */
5474 #define ARM_NOP 0xe1a00000
5475 #define THUMB_NOP 0x4600
5476
5477 /* Helper for register reads for displaced stepping. In particular, this
5478 returns the PC as it would be seen by the instruction at its original
5479 location. */
5480
5481 ULONGEST
5482 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5483 int regno)
5484 {
5485 ULONGEST ret;
5486 CORE_ADDR from = dsc->insn_addr;
5487
5488 if (regno == ARM_PC_REGNUM)
5489 {
5490 /* Compute pipeline offset:
5491 - When executing an ARM instruction, PC reads as the address of the
5492 current instruction plus 8.
5493 - When executing a Thumb instruction, PC reads as the address of the
5494 current instruction plus 4. */
5495
5496 if (!dsc->is_thumb)
5497 from += 8;
5498 else
5499 from += 4;
5500
5501 displaced_debug_printf ("read pc value %.8lx",
5502 (unsigned long) from);
5503 return (ULONGEST) from;
5504 }
5505 else
5506 {
5507 regcache_cooked_read_unsigned (regs, regno, &ret);
5508
5509 displaced_debug_printf ("read r%d value %.8lx",
5510 regno, (unsigned long) ret);
5511
5512 return ret;
5513 }
5514 }
5515
5516 static int
5517 displaced_in_arm_mode (struct regcache *regs)
5518 {
5519 ULONGEST ps;
5520 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5521
5522 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5523
5524 return (ps & t_bit) == 0;
5525 }
5526
5527 /* Write to the PC as from a branch instruction. */
5528
5529 static void
5530 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5531 ULONGEST val)
5532 {
5533 if (!dsc->is_thumb)
5534 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5535 architecture versions < 6. */
5536 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5537 val & ~(ULONGEST) 0x3);
5538 else
5539 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5540 val & ~(ULONGEST) 0x1);
5541 }
5542
5543 /* Write to the PC as from a branch-exchange instruction. */
5544
5545 static void
5546 bx_write_pc (struct regcache *regs, ULONGEST val)
5547 {
5548 ULONGEST ps;
5549 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5550
5551 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5552
5553 if ((val & 1) == 1)
5554 {
5555 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5556 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5557 }
5558 else if ((val & 2) == 0)
5559 {
5560 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5561 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5562 }
5563 else
5564 {
5565 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5566 mode, align dest to 4 bytes). */
5567 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5568 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5569 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5570 }
5571 }
5572
5573 /* Write to the PC as if from a load instruction. */
5574
5575 static void
5576 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5577 ULONGEST val)
5578 {
5579 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5580 bx_write_pc (regs, val);
5581 else
5582 branch_write_pc (regs, dsc, val);
5583 }
5584
5585 /* Write to the PC as if from an ALU instruction. */
5586
5587 static void
5588 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5589 ULONGEST val)
5590 {
5591 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5592 bx_write_pc (regs, val);
5593 else
5594 branch_write_pc (regs, dsc, val);
5595 }
5596
5597 /* Helper for writing to registers for displaced stepping. Writing to the PC
5598 has a varying effects depending on the instruction which does the write:
5599 this is controlled by the WRITE_PC argument. */
5600
5601 void
5602 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5603 int regno, ULONGEST val, enum pc_write_style write_pc)
5604 {
5605 if (regno == ARM_PC_REGNUM)
5606 {
5607 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5608
5609 switch (write_pc)
5610 {
5611 case BRANCH_WRITE_PC:
5612 branch_write_pc (regs, dsc, val);
5613 break;
5614
5615 case BX_WRITE_PC:
5616 bx_write_pc (regs, val);
5617 break;
5618
5619 case LOAD_WRITE_PC:
5620 load_write_pc (regs, dsc, val);
5621 break;
5622
5623 case ALU_WRITE_PC:
5624 alu_write_pc (regs, dsc, val);
5625 break;
5626
5627 case CANNOT_WRITE_PC:
5628 warning (_("Instruction wrote to PC in an unexpected way when "
5629 "single-stepping"));
5630 break;
5631
5632 default:
5633 internal_error (_("Invalid argument to displaced_write_reg"));
5634 }
5635
5636 dsc->wrote_to_pc = 1;
5637 }
5638 else
5639 {
5640 displaced_debug_printf ("writing r%d value %.8lx",
5641 regno, (unsigned long) val);
5642 regcache_cooked_write_unsigned (regs, regno, val);
5643 }
5644 }
5645
5646 /* This function is used to concisely determine if an instruction INSN
5647 references PC. Register fields of interest in INSN should have the
5648 corresponding fields of BITMASK set to 0b1111. The function
5649 returns return 1 if any of these fields in INSN reference the PC
5650 (also 0b1111, r15), else it returns 0. */
5651
5652 static int
5653 insn_references_pc (uint32_t insn, uint32_t bitmask)
5654 {
5655 uint32_t lowbit = 1;
5656
5657 while (bitmask != 0)
5658 {
5659 uint32_t mask;
5660
5661 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5662 ;
5663
5664 if (!lowbit)
5665 break;
5666
5667 mask = lowbit * 0xf;
5668
5669 if ((insn & mask) == mask)
5670 return 1;
5671
5672 bitmask &= ~mask;
5673 }
5674
5675 return 0;
5676 }
5677
5678 /* The simplest copy function. Many instructions have the same effect no
5679 matter what address they are executed at: in those cases, use this. */
5680
5681 static int
5682 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5683 arm_displaced_step_copy_insn_closure *dsc)
5684 {
5685 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5686 (unsigned long) insn, iname);
5687
5688 dsc->modinsn[0] = insn;
5689
5690 return 0;
5691 }
5692
5693 static int
5694 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5695 uint16_t insn2, const char *iname,
5696 arm_displaced_step_copy_insn_closure *dsc)
5697 {
5698 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5699 "unmodified", insn1, insn2, iname);
5700
5701 dsc->modinsn[0] = insn1;
5702 dsc->modinsn[1] = insn2;
5703 dsc->numinsns = 2;
5704
5705 return 0;
5706 }
5707
5708 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5709 modification. */
5710 static int
5711 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5712 const char *iname,
5713 arm_displaced_step_copy_insn_closure *dsc)
5714 {
5715 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5716 insn, iname);
5717
5718 dsc->modinsn[0] = insn;
5719
5720 return 0;
5721 }
5722
5723 /* Preload instructions with immediate offset. */
5724
5725 static void
5726 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5727 arm_displaced_step_copy_insn_closure *dsc)
5728 {
5729 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5730 if (!dsc->u.preload.immed)
5731 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5732 }
5733
5734 static void
5735 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5736 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5737 {
5738 ULONGEST rn_val;
5739 /* Preload instructions:
5740
5741 {pli/pld} [rn, #+/-imm]
5742 ->
5743 {pli/pld} [r0, #+/-imm]. */
5744
5745 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5746 rn_val = displaced_read_reg (regs, dsc, rn);
5747 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5748 dsc->u.preload.immed = 1;
5749
5750 dsc->cleanup = &cleanup_preload;
5751 }
5752
5753 static int
5754 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5755 arm_displaced_step_copy_insn_closure *dsc)
5756 {
5757 unsigned int rn = bits (insn, 16, 19);
5758
5759 if (!insn_references_pc (insn, 0x000f0000ul))
5760 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5761
5762 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5763
5764 dsc->modinsn[0] = insn & 0xfff0ffff;
5765
5766 install_preload (gdbarch, regs, dsc, rn);
5767
5768 return 0;
5769 }
5770
5771 static int
5772 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5773 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5774 {
5775 unsigned int rn = bits (insn1, 0, 3);
5776 unsigned int u_bit = bit (insn1, 7);
5777 int imm12 = bits (insn2, 0, 11);
5778 ULONGEST pc_val;
5779
5780 if (rn != ARM_PC_REGNUM)
5781 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5782
5783 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5784 PLD (literal) Encoding T1. */
5785 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5786 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5787 imm12);
5788
5789 if (!u_bit)
5790 imm12 = -1 * imm12;
5791
5792 /* Rewrite instruction {pli/pld} PC imm12 into:
5793 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5794
5795 {pli/pld} [r0, r1]
5796
5797 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5798
5799 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5800 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5801
5802 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5803
5804 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5805 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5806 dsc->u.preload.immed = 0;
5807
5808 /* {pli/pld} [r0, r1] */
5809 dsc->modinsn[0] = insn1 & 0xfff0;
5810 dsc->modinsn[1] = 0xf001;
5811 dsc->numinsns = 2;
5812
5813 dsc->cleanup = &cleanup_preload;
5814 return 0;
5815 }
5816
5817 /* Preload instructions with register offset. */
5818
5819 static void
5820 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5821 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5822 unsigned int rm)
5823 {
5824 ULONGEST rn_val, rm_val;
5825
5826 /* Preload register-offset instructions:
5827
5828 {pli/pld} [rn, rm {, shift}]
5829 ->
5830 {pli/pld} [r0, r1 {, shift}]. */
5831
5832 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5833 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5834 rn_val = displaced_read_reg (regs, dsc, rn);
5835 rm_val = displaced_read_reg (regs, dsc, rm);
5836 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5837 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5838 dsc->u.preload.immed = 0;
5839
5840 dsc->cleanup = &cleanup_preload;
5841 }
5842
5843 static int
5844 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5845 struct regcache *regs,
5846 arm_displaced_step_copy_insn_closure *dsc)
5847 {
5848 unsigned int rn = bits (insn, 16, 19);
5849 unsigned int rm = bits (insn, 0, 3);
5850
5851
5852 if (!insn_references_pc (insn, 0x000f000ful))
5853 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5854
5855 displaced_debug_printf ("copying preload insn %.8lx",
5856 (unsigned long) insn);
5857
5858 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5859
5860 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5861 return 0;
5862 }
5863
5864 /* Copy/cleanup coprocessor load and store instructions. */
5865
5866 static void
5867 cleanup_copro_load_store (struct gdbarch *gdbarch,
5868 struct regcache *regs,
5869 arm_displaced_step_copy_insn_closure *dsc)
5870 {
5871 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5872
5873 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5874
5875 if (dsc->u.ldst.writeback)
5876 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5877 }
5878
5879 static void
5880 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5881 arm_displaced_step_copy_insn_closure *dsc,
5882 int writeback, unsigned int rn)
5883 {
5884 ULONGEST rn_val;
5885
5886 /* Coprocessor load/store instructions:
5887
5888 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5889 ->
5890 {stc/stc2} [r0, #+/-imm].
5891
5892 ldc/ldc2 are handled identically. */
5893
5894 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5895 rn_val = displaced_read_reg (regs, dsc, rn);
5896 /* PC should be 4-byte aligned. */
5897 rn_val = rn_val & 0xfffffffc;
5898 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5899
5900 dsc->u.ldst.writeback = writeback;
5901 dsc->u.ldst.rn = rn;
5902
5903 dsc->cleanup = &cleanup_copro_load_store;
5904 }
5905
5906 static int
5907 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5908 struct regcache *regs,
5909 arm_displaced_step_copy_insn_closure *dsc)
5910 {
5911 unsigned int rn = bits (insn, 16, 19);
5912
5913 if (!insn_references_pc (insn, 0x000f0000ul))
5914 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5915
5916 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5917 (unsigned long) insn);
5918
5919 dsc->modinsn[0] = insn & 0xfff0ffff;
5920
5921 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5922
5923 return 0;
5924 }
5925
5926 static int
5927 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5928 uint16_t insn2, struct regcache *regs,
5929 arm_displaced_step_copy_insn_closure *dsc)
5930 {
5931 unsigned int rn = bits (insn1, 0, 3);
5932
5933 if (rn != ARM_PC_REGNUM)
5934 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5935 "copro load/store", dsc);
5936
5937 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5938 insn1, insn2);
5939
5940 dsc->modinsn[0] = insn1 & 0xfff0;
5941 dsc->modinsn[1] = insn2;
5942 dsc->numinsns = 2;
5943
5944 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5945 doesn't support writeback, so pass 0. */
5946 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5947
5948 return 0;
5949 }
5950
5951 /* Clean up branch instructions (actually perform the branch, by setting
5952 PC). */
5953
5954 static void
5955 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5956 arm_displaced_step_copy_insn_closure *dsc)
5957 {
5958 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5959 int branch_taken = condition_true (dsc->u.branch.cond, status);
5960 enum pc_write_style write_pc = dsc->u.branch.exchange
5961 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5962
5963 if (!branch_taken)
5964 return;
5965
5966 if (dsc->u.branch.link)
5967 {
5968 /* The value of LR should be the next insn of current one. In order
5969 not to confuse logic handling later insn `bx lr', if current insn mode
5970 is Thumb, the bit 0 of LR value should be set to 1. */
5971 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5972
5973 if (dsc->is_thumb)
5974 next_insn_addr |= 0x1;
5975
5976 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5977 CANNOT_WRITE_PC);
5978 }
5979
5980 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5981 }
5982
5983 /* Copy B/BL/BLX instructions with immediate destinations. */
5984
5985 static void
5986 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5987 arm_displaced_step_copy_insn_closure *dsc,
5988 unsigned int cond, int exchange, int link, long offset)
5989 {
5990 /* Implement "BL<cond> <label>" as:
5991
5992 Preparation: cond <- instruction condition
5993 Insn: mov r0, r0 (nop)
5994 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5995
5996 B<cond> similar, but don't set r14 in cleanup. */
5997
5998 dsc->u.branch.cond = cond;
5999 dsc->u.branch.link = link;
6000 dsc->u.branch.exchange = exchange;
6001
6002 dsc->u.branch.dest = dsc->insn_addr;
6003 if (link && exchange)
6004 /* For BLX, offset is computed from the Align (PC, 4). */
6005 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6006
6007 if (dsc->is_thumb)
6008 dsc->u.branch.dest += 4 + offset;
6009 else
6010 dsc->u.branch.dest += 8 + offset;
6011
6012 dsc->cleanup = &cleanup_branch;
6013 }
6014 static int
6015 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6016 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6017 {
6018 unsigned int cond = bits (insn, 28, 31);
6019 int exchange = (cond == 0xf);
6020 int link = exchange || bit (insn, 24);
6021 long offset;
6022
6023 displaced_debug_printf ("copying %s immediate insn %.8lx",
6024 (exchange) ? "blx" : (link) ? "bl" : "b",
6025 (unsigned long) insn);
6026 if (exchange)
6027 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6028 then arrange the switch into Thumb mode. */
6029 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6030 else
6031 offset = bits (insn, 0, 23) << 2;
6032
6033 if (bit (offset, 25))
6034 offset = offset | ~0x3ffffff;
6035
6036 dsc->modinsn[0] = ARM_NOP;
6037
6038 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6039 return 0;
6040 }
6041
6042 static int
6043 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6044 uint16_t insn2, struct regcache *regs,
6045 arm_displaced_step_copy_insn_closure *dsc)
6046 {
6047 int link = bit (insn2, 14);
6048 int exchange = link && !bit (insn2, 12);
6049 int cond = INST_AL;
6050 long offset = 0;
6051 int j1 = bit (insn2, 13);
6052 int j2 = bit (insn2, 11);
6053 int s = sbits (insn1, 10, 10);
6054 int i1 = !(j1 ^ bit (insn1, 10));
6055 int i2 = !(j2 ^ bit (insn1, 10));
6056
6057 if (!link && !exchange) /* B */
6058 {
6059 offset = (bits (insn2, 0, 10) << 1);
6060 if (bit (insn2, 12)) /* Encoding T4 */
6061 {
6062 offset |= (bits (insn1, 0, 9) << 12)
6063 | (i2 << 22)
6064 | (i1 << 23)
6065 | (s << 24);
6066 cond = INST_AL;
6067 }
6068 else /* Encoding T3 */
6069 {
6070 offset |= (bits (insn1, 0, 5) << 12)
6071 | (j1 << 18)
6072 | (j2 << 19)
6073 | (s << 20);
6074 cond = bits (insn1, 6, 9);
6075 }
6076 }
6077 else
6078 {
6079 offset = (bits (insn1, 0, 9) << 12);
6080 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6081 offset |= exchange ?
6082 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6083 }
6084
6085 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
6086 link ? (exchange) ? "blx" : "bl" : "b",
6087 insn1, insn2, offset);
6088
6089 dsc->modinsn[0] = THUMB_NOP;
6090
6091 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6092 return 0;
6093 }
6094
6095 /* Copy B Thumb instructions. */
6096 static int
6097 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
6098 arm_displaced_step_copy_insn_closure *dsc)
6099 {
6100 unsigned int cond = 0;
6101 int offset = 0;
6102 unsigned short bit_12_15 = bits (insn, 12, 15);
6103 CORE_ADDR from = dsc->insn_addr;
6104
6105 if (bit_12_15 == 0xd)
6106 {
6107 /* offset = SignExtend (imm8:0, 32) */
6108 offset = sbits ((insn << 1), 0, 8);
6109 cond = bits (insn, 8, 11);
6110 }
6111 else if (bit_12_15 == 0xe) /* Encoding T2 */
6112 {
6113 offset = sbits ((insn << 1), 0, 11);
6114 cond = INST_AL;
6115 }
6116
6117 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
6118 insn, offset);
6119
6120 dsc->u.branch.cond = cond;
6121 dsc->u.branch.link = 0;
6122 dsc->u.branch.exchange = 0;
6123 dsc->u.branch.dest = from + 4 + offset;
6124
6125 dsc->modinsn[0] = THUMB_NOP;
6126
6127 dsc->cleanup = &cleanup_branch;
6128
6129 return 0;
6130 }
6131
6132 /* Copy BX/BLX with register-specified destinations. */
6133
6134 static void
6135 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6136 arm_displaced_step_copy_insn_closure *dsc, int link,
6137 unsigned int cond, unsigned int rm)
6138 {
6139 /* Implement {BX,BLX}<cond> <reg>" as:
6140
6141 Preparation: cond <- instruction condition
6142 Insn: mov r0, r0 (nop)
6143 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6144
6145 Don't set r14 in cleanup for BX. */
6146
6147 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6148
6149 dsc->u.branch.cond = cond;
6150 dsc->u.branch.link = link;
6151
6152 dsc->u.branch.exchange = 1;
6153
6154 dsc->cleanup = &cleanup_branch;
6155 }
6156
6157 static int
6158 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6159 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6160 {
6161 unsigned int cond = bits (insn, 28, 31);
6162 /* BX: x12xxx1x
6163 BLX: x12xxx3x. */
6164 int link = bit (insn, 5);
6165 unsigned int rm = bits (insn, 0, 3);
6166
6167 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
6168
6169 dsc->modinsn[0] = ARM_NOP;
6170
6171 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6172 return 0;
6173 }
6174
6175 static int
6176 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6177 struct regcache *regs,
6178 arm_displaced_step_copy_insn_closure *dsc)
6179 {
6180 int link = bit (insn, 7);
6181 unsigned int rm = bits (insn, 3, 6);
6182
6183 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
6184
6185 dsc->modinsn[0] = THUMB_NOP;
6186
6187 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6188
6189 return 0;
6190 }
6191
6192
6193 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6194
6195 static void
6196 cleanup_alu_imm (struct gdbarch *gdbarch,
6197 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6198 {
6199 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6200 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6201 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6202 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6203 }
6204
6205 static int
6206 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6207 arm_displaced_step_copy_insn_closure *dsc)
6208 {
6209 unsigned int rn = bits (insn, 16, 19);
6210 unsigned int rd = bits (insn, 12, 15);
6211 unsigned int op = bits (insn, 21, 24);
6212 int is_mov = (op == 0xd);
6213 ULONGEST rd_val, rn_val;
6214
6215 if (!insn_references_pc (insn, 0x000ff000ul))
6216 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6217
6218 displaced_debug_printf ("copying immediate %s insn %.8lx",
6219 is_mov ? "move" : "ALU",
6220 (unsigned long) insn);
6221
6222 /* Instruction is of form:
6223
6224 <op><cond> rd, [rn,] #imm
6225
6226 Rewrite as:
6227
6228 Preparation: tmp1, tmp2 <- r0, r1;
6229 r0, r1 <- rd, rn
6230 Insn: <op><cond> r0, r1, #imm
6231 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6232 */
6233
6234 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6235 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6236 rn_val = displaced_read_reg (regs, dsc, rn);
6237 rd_val = displaced_read_reg (regs, dsc, rd);
6238 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6239 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6240 dsc->rd = rd;
6241
6242 if (is_mov)
6243 dsc->modinsn[0] = insn & 0xfff00fff;
6244 else
6245 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6246
6247 dsc->cleanup = &cleanup_alu_imm;
6248
6249 return 0;
6250 }
6251
6252 static int
6253 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6254 uint16_t insn2, struct regcache *regs,
6255 arm_displaced_step_copy_insn_closure *dsc)
6256 {
6257 unsigned int op = bits (insn1, 5, 8);
6258 unsigned int rn, rm, rd;
6259 ULONGEST rd_val, rn_val;
6260
6261 rn = bits (insn1, 0, 3); /* Rn */
6262 rm = bits (insn2, 0, 3); /* Rm */
6263 rd = bits (insn2, 8, 11); /* Rd */
6264
6265 /* This routine is only called for instruction MOV. */
6266 gdb_assert (op == 0x2 && rn == 0xf);
6267
6268 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6269 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6270
6271 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
6272
6273 /* Instruction is of form:
6274
6275 <op><cond> rd, [rn,] #imm
6276
6277 Rewrite as:
6278
6279 Preparation: tmp1, tmp2 <- r0, r1;
6280 r0, r1 <- rd, rn
6281 Insn: <op><cond> r0, r1, #imm
6282 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6283 */
6284
6285 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6286 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6287 rn_val = displaced_read_reg (regs, dsc, rn);
6288 rd_val = displaced_read_reg (regs, dsc, rd);
6289 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6290 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6291 dsc->rd = rd;
6292
6293 dsc->modinsn[0] = insn1;
6294 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6295 dsc->numinsns = 2;
6296
6297 dsc->cleanup = &cleanup_alu_imm;
6298
6299 return 0;
6300 }
6301
6302 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6303
6304 static void
6305 cleanup_alu_reg (struct gdbarch *gdbarch,
6306 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6307 {
6308 ULONGEST rd_val;
6309 int i;
6310
6311 rd_val = displaced_read_reg (regs, dsc, 0);
6312
6313 for (i = 0; i < 3; i++)
6314 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6315
6316 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6317 }
6318
6319 static void
6320 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6321 arm_displaced_step_copy_insn_closure *dsc,
6322 unsigned int rd, unsigned int rn, unsigned int rm)
6323 {
6324 ULONGEST rd_val, rn_val, rm_val;
6325
6326 /* Instruction is of form:
6327
6328 <op><cond> rd, [rn,] rm [, <shift>]
6329
6330 Rewrite as:
6331
6332 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6333 r0, r1, r2 <- rd, rn, rm
6334 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6335 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6336 */
6337
6338 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6339 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6340 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6341 rd_val = displaced_read_reg (regs, dsc, rd);
6342 rn_val = displaced_read_reg (regs, dsc, rn);
6343 rm_val = displaced_read_reg (regs, dsc, rm);
6344 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6345 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6346 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6347 dsc->rd = rd;
6348
6349 dsc->cleanup = &cleanup_alu_reg;
6350 }
6351
6352 static int
6353 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6354 arm_displaced_step_copy_insn_closure *dsc)
6355 {
6356 unsigned int op = bits (insn, 21, 24);
6357 int is_mov = (op == 0xd);
6358
6359 if (!insn_references_pc (insn, 0x000ff00ful))
6360 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6361
6362 displaced_debug_printf ("copying reg %s insn %.8lx",
6363 is_mov ? "move" : "ALU", (unsigned long) insn);
6364
6365 if (is_mov)
6366 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6367 else
6368 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6369
6370 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6371 bits (insn, 0, 3));
6372 return 0;
6373 }
6374
6375 static int
6376 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6377 struct regcache *regs,
6378 arm_displaced_step_copy_insn_closure *dsc)
6379 {
6380 unsigned rm, rd;
6381
6382 rm = bits (insn, 3, 6);
6383 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6384
6385 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6386 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6387
6388 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6389
6390 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6391
6392 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6393
6394 return 0;
6395 }
6396
6397 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6398
6399 static void
6400 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6401 struct regcache *regs,
6402 arm_displaced_step_copy_insn_closure *dsc)
6403 {
6404 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6405 int i;
6406
6407 for (i = 0; i < 4; i++)
6408 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6409
6410 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6411 }
6412
6413 static void
6414 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6415 arm_displaced_step_copy_insn_closure *dsc,
6416 unsigned int rd, unsigned int rn, unsigned int rm,
6417 unsigned rs)
6418 {
6419 int i;
6420 ULONGEST rd_val, rn_val, rm_val, rs_val;
6421
6422 /* Instruction is of form:
6423
6424 <op><cond> rd, [rn,] rm, <shift> rs
6425
6426 Rewrite as:
6427
6428 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6429 r0, r1, r2, r3 <- rd, rn, rm, rs
6430 Insn: <op><cond> r0, r1, r2, <shift> r3
6431 Cleanup: tmp5 <- r0
6432 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6433 rd <- tmp5
6434 */
6435
6436 for (i = 0; i < 4; i++)
6437 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6438
6439 rd_val = displaced_read_reg (regs, dsc, rd);
6440 rn_val = displaced_read_reg (regs, dsc, rn);
6441 rm_val = displaced_read_reg (regs, dsc, rm);
6442 rs_val = displaced_read_reg (regs, dsc, rs);
6443 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6444 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6445 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6446 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6447 dsc->rd = rd;
6448 dsc->cleanup = &cleanup_alu_shifted_reg;
6449 }
6450
6451 static int
6452 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6453 struct regcache *regs,
6454 arm_displaced_step_copy_insn_closure *dsc)
6455 {
6456 unsigned int op = bits (insn, 21, 24);
6457 int is_mov = (op == 0xd);
6458 unsigned int rd, rn, rm, rs;
6459
6460 if (!insn_references_pc (insn, 0x000fff0ful))
6461 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6462
6463 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6464 is_mov ? "move" : "ALU",
6465 (unsigned long) insn);
6466
6467 rn = bits (insn, 16, 19);
6468 rm = bits (insn, 0, 3);
6469 rs = bits (insn, 8, 11);
6470 rd = bits (insn, 12, 15);
6471
6472 if (is_mov)
6473 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6474 else
6475 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6476
6477 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6478
6479 return 0;
6480 }
6481
6482 /* Clean up load instructions. */
6483
6484 static void
6485 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6486 arm_displaced_step_copy_insn_closure *dsc)
6487 {
6488 ULONGEST rt_val, rt_val2 = 0, rn_val;
6489
6490 rt_val = displaced_read_reg (regs, dsc, 0);
6491 if (dsc->u.ldst.xfersize == 8)
6492 rt_val2 = displaced_read_reg (regs, dsc, 1);
6493 rn_val = displaced_read_reg (regs, dsc, 2);
6494
6495 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6496 if (dsc->u.ldst.xfersize > 4)
6497 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6498 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6499 if (!dsc->u.ldst.immed)
6500 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6501
6502 /* Handle register writeback. */
6503 if (dsc->u.ldst.writeback)
6504 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6505 /* Put result in right place. */
6506 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6507 if (dsc->u.ldst.xfersize == 8)
6508 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6509 }
6510
6511 /* Clean up store instructions. */
6512
6513 static void
6514 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6515 arm_displaced_step_copy_insn_closure *dsc)
6516 {
6517 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6518
6519 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6520 if (dsc->u.ldst.xfersize > 4)
6521 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6522 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6523 if (!dsc->u.ldst.immed)
6524 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6525 if (!dsc->u.ldst.restore_r4)
6526 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6527
6528 /* Writeback. */
6529 if (dsc->u.ldst.writeback)
6530 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6531 }
6532
6533 /* Copy "extra" load/store instructions. These are halfword/doubleword
6534 transfers, which have a different encoding to byte/word transfers. */
6535
6536 static int
6537 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6538 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6539 {
6540 unsigned int op1 = bits (insn, 20, 24);
6541 unsigned int op2 = bits (insn, 5, 6);
6542 unsigned int rt = bits (insn, 12, 15);
6543 unsigned int rn = bits (insn, 16, 19);
6544 unsigned int rm = bits (insn, 0, 3);
6545 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6546 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6547 int immed = (op1 & 0x4) != 0;
6548 int opcode;
6549 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6550
6551 if (!insn_references_pc (insn, 0x000ff00ful))
6552 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6553
6554 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6555 unprivileged ? "unprivileged " : "",
6556 (unsigned long) insn);
6557
6558 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6559
6560 if (opcode < 0)
6561 internal_error (_("copy_extra_ld_st: instruction decode error"));
6562
6563 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6564 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6565 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6566 if (!immed)
6567 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6568
6569 rt_val = displaced_read_reg (regs, dsc, rt);
6570 if (bytesize[opcode] == 8)
6571 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6572 rn_val = displaced_read_reg (regs, dsc, rn);
6573 if (!immed)
6574 rm_val = displaced_read_reg (regs, dsc, rm);
6575
6576 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6577 if (bytesize[opcode] == 8)
6578 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6579 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6580 if (!immed)
6581 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6582
6583 dsc->rd = rt;
6584 dsc->u.ldst.xfersize = bytesize[opcode];
6585 dsc->u.ldst.rn = rn;
6586 dsc->u.ldst.immed = immed;
6587 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6588 dsc->u.ldst.restore_r4 = 0;
6589
6590 if (immed)
6591 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6592 ->
6593 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6594 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6595 else
6596 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6597 ->
6598 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6599 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6600
6601 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6602
6603 return 0;
6604 }
6605
6606 /* Copy byte/half word/word loads and stores. */
6607
6608 static void
6609 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6610 arm_displaced_step_copy_insn_closure *dsc, int load,
6611 int immed, int writeback, int size, int usermode,
6612 int rt, int rm, int rn)
6613 {
6614 ULONGEST rt_val, rn_val, rm_val = 0;
6615
6616 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6617 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6618 if (!immed)
6619 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6620 if (!load)
6621 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6622
6623 rt_val = displaced_read_reg (regs, dsc, rt);
6624 rn_val = displaced_read_reg (regs, dsc, rn);
6625 if (!immed)
6626 rm_val = displaced_read_reg (regs, dsc, rm);
6627
6628 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6629 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6630 if (!immed)
6631 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6632 dsc->rd = rt;
6633 dsc->u.ldst.xfersize = size;
6634 dsc->u.ldst.rn = rn;
6635 dsc->u.ldst.immed = immed;
6636 dsc->u.ldst.writeback = writeback;
6637
6638 /* To write PC we can do:
6639
6640 Before this sequence of instructions:
6641 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6642 r2 is the Rn value got from displaced_read_reg.
6643
6644 Insn1: push {pc} Write address of STR instruction + offset on stack
6645 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6646 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6647 = addr(Insn1) + offset - addr(Insn3) - 8
6648 = offset - 16
6649 Insn4: add r4, r4, #8 r4 = offset - 8
6650 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6651 = from + offset
6652 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6653
6654 Otherwise we don't know what value to write for PC, since the offset is
6655 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6656 of this can be found in Section "Saving from r15" in
6657 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6658
6659 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6660 }
6661
6662
6663 static int
6664 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6665 uint16_t insn2, struct regcache *regs,
6666 arm_displaced_step_copy_insn_closure *dsc, int size)
6667 {
6668 unsigned int u_bit = bit (insn1, 7);
6669 unsigned int rt = bits (insn2, 12, 15);
6670 int imm12 = bits (insn2, 0, 11);
6671 ULONGEST pc_val;
6672
6673 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6674 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6675 imm12);
6676
6677 if (!u_bit)
6678 imm12 = -1 * imm12;
6679
6680 /* Rewrite instruction LDR Rt imm12 into:
6681
6682 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6683
6684 LDR R0, R2, R3,
6685
6686 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6687
6688
6689 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6690 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6691 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6692
6693 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6694
6695 pc_val = pc_val & 0xfffffffc;
6696
6697 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6698 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6699
6700 dsc->rd = rt;
6701
6702 dsc->u.ldst.xfersize = size;
6703 dsc->u.ldst.immed = 0;
6704 dsc->u.ldst.writeback = 0;
6705 dsc->u.ldst.restore_r4 = 0;
6706
6707 /* LDR R0, R2, R3 */
6708 dsc->modinsn[0] = 0xf852;
6709 dsc->modinsn[1] = 0x3;
6710 dsc->numinsns = 2;
6711
6712 dsc->cleanup = &cleanup_load;
6713
6714 return 0;
6715 }
6716
6717 static int
6718 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6719 uint16_t insn2, struct regcache *regs,
6720 arm_displaced_step_copy_insn_closure *dsc,
6721 int writeback, int immed)
6722 {
6723 unsigned int rt = bits (insn2, 12, 15);
6724 unsigned int rn = bits (insn1, 0, 3);
6725 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6726 /* In LDR (register), there is also a register Rm, which is not allowed to
6727 be PC, so we don't have to check it. */
6728
6729 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6730 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6731 dsc);
6732
6733 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6734 rt, rn, insn1, insn2);
6735
6736 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6737 0, rt, rm, rn);
6738
6739 dsc->u.ldst.restore_r4 = 0;
6740
6741 if (immed)
6742 /* ldr[b]<cond> rt, [rn, #imm], etc.
6743 ->
6744 ldr[b]<cond> r0, [r2, #imm]. */
6745 {
6746 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6747 dsc->modinsn[1] = insn2 & 0x0fff;
6748 }
6749 else
6750 /* ldr[b]<cond> rt, [rn, rm], etc.
6751 ->
6752 ldr[b]<cond> r0, [r2, r3]. */
6753 {
6754 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6755 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6756 }
6757
6758 dsc->numinsns = 2;
6759
6760 return 0;
6761 }
6762
6763
6764 static int
6765 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6766 struct regcache *regs,
6767 arm_displaced_step_copy_insn_closure *dsc,
6768 int load, int size, int usermode)
6769 {
6770 int immed = !bit (insn, 25);
6771 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6772 unsigned int rt = bits (insn, 12, 15);
6773 unsigned int rn = bits (insn, 16, 19);
6774 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6775
6776 if (!insn_references_pc (insn, 0x000ff00ful))
6777 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6778
6779 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6780 load ? (size == 1 ? "ldrb" : "ldr")
6781 : (size == 1 ? "strb" : "str"),
6782 usermode ? "t" : "",
6783 rt, rn,
6784 (unsigned long) insn);
6785
6786 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6787 usermode, rt, rm, rn);
6788
6789 if (load || rt != ARM_PC_REGNUM)
6790 {
6791 dsc->u.ldst.restore_r4 = 0;
6792
6793 if (immed)
6794 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6795 ->
6796 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6797 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6798 else
6799 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6800 ->
6801 {ldr,str}[b]<cond> r0, [r2, r3]. */
6802 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6803 }
6804 else
6805 {
6806 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6807 dsc->u.ldst.restore_r4 = 1;
6808 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6809 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6810 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6811 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6812 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6813
6814 /* As above. */
6815 if (immed)
6816 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6817 else
6818 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6819
6820 dsc->numinsns = 6;
6821 }
6822
6823 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6824
6825 return 0;
6826 }
6827
6828 /* Cleanup LDM instructions with fully-populated register list. This is an
6829 unfortunate corner case: it's impossible to implement correctly by modifying
6830 the instruction. The issue is as follows: we have an instruction,
6831
6832 ldm rN, {r0-r15}
6833
6834 which we must rewrite to avoid loading PC. A possible solution would be to
6835 do the load in two halves, something like (with suitable cleanup
6836 afterwards):
6837
6838 mov r8, rN
6839 ldm[id][ab] r8!, {r0-r7}
6840 str r7, <temp>
6841 ldm[id][ab] r8, {r7-r14}
6842 <bkpt>
6843
6844 but at present there's no suitable place for <temp>, since the scratch space
6845 is overwritten before the cleanup routine is called. For now, we simply
6846 emulate the instruction. */
6847
6848 static void
6849 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6850 arm_displaced_step_copy_insn_closure *dsc)
6851 {
6852 int inc = dsc->u.block.increment;
6853 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6854 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6855 uint32_t regmask = dsc->u.block.regmask;
6856 int regno = inc ? 0 : 15;
6857 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6858 int exception_return = dsc->u.block.load && dsc->u.block.user
6859 && (regmask & 0x8000) != 0;
6860 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6861 int do_transfer = condition_true (dsc->u.block.cond, status);
6862 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6863
6864 if (!do_transfer)
6865 return;
6866
6867 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6868 sensible we can do here. Complain loudly. */
6869 if (exception_return)
6870 error (_("Cannot single-step exception return"));
6871
6872 /* We don't handle any stores here for now. */
6873 gdb_assert (dsc->u.block.load != 0);
6874
6875 displaced_debug_printf ("emulating block transfer: %s %s %s",
6876 dsc->u.block.load ? "ldm" : "stm",
6877 dsc->u.block.increment ? "inc" : "dec",
6878 dsc->u.block.before ? "before" : "after");
6879
6880 while (regmask)
6881 {
6882 uint32_t memword;
6883
6884 if (inc)
6885 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6886 regno++;
6887 else
6888 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6889 regno--;
6890
6891 xfer_addr += bump_before;
6892
6893 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6894 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6895
6896 xfer_addr += bump_after;
6897
6898 regmask &= ~(1 << regno);
6899 }
6900
6901 if (dsc->u.block.writeback)
6902 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6903 CANNOT_WRITE_PC);
6904 }
6905
6906 /* Clean up an STM which included the PC in the register list. */
6907
6908 static void
6909 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6910 arm_displaced_step_copy_insn_closure *dsc)
6911 {
6912 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6913 int store_executed = condition_true (dsc->u.block.cond, status);
6914 CORE_ADDR pc_stored_at, transferred_regs
6915 = count_one_bits (dsc->u.block.regmask);
6916 CORE_ADDR stm_insn_addr;
6917 uint32_t pc_val;
6918 long offset;
6919 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6920
6921 /* If condition code fails, there's nothing else to do. */
6922 if (!store_executed)
6923 return;
6924
6925 if (dsc->u.block.increment)
6926 {
6927 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6928
6929 if (dsc->u.block.before)
6930 pc_stored_at += 4;
6931 }
6932 else
6933 {
6934 pc_stored_at = dsc->u.block.xfer_addr;
6935
6936 if (dsc->u.block.before)
6937 pc_stored_at -= 4;
6938 }
6939
6940 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6941 stm_insn_addr = dsc->scratch_base;
6942 offset = pc_val - stm_insn_addr;
6943
6944 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6945 offset);
6946
6947 /* Rewrite the stored PC to the proper value for the non-displaced original
6948 instruction. */
6949 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6950 dsc->insn_addr + offset);
6951 }
6952
6953 /* Clean up an LDM which includes the PC in the register list. We clumped all
6954 the registers in the transferred list into a contiguous range r0...rX (to
6955 avoid loading PC directly and losing control of the debugged program), so we
6956 must undo that here. */
6957
6958 static void
6959 cleanup_block_load_pc (struct gdbarch *gdbarch,
6960 struct regcache *regs,
6961 arm_displaced_step_copy_insn_closure *dsc)
6962 {
6963 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6964 int load_executed = condition_true (dsc->u.block.cond, status);
6965 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6966 unsigned int regs_loaded = count_one_bits (mask);
6967 unsigned int num_to_shuffle = regs_loaded, clobbered;
6968
6969 /* The method employed here will fail if the register list is fully populated
6970 (we need to avoid loading PC directly). */
6971 gdb_assert (num_to_shuffle < 16);
6972
6973 if (!load_executed)
6974 return;
6975
6976 clobbered = (1 << num_to_shuffle) - 1;
6977
6978 while (num_to_shuffle > 0)
6979 {
6980 if ((mask & (1 << write_reg)) != 0)
6981 {
6982 unsigned int read_reg = num_to_shuffle - 1;
6983
6984 if (read_reg != write_reg)
6985 {
6986 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6987 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6988 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6989 read_reg, write_reg);
6990 }
6991 else
6992 displaced_debug_printf ("LDM: register r%d already in the right "
6993 "place", write_reg);
6994
6995 clobbered &= ~(1 << write_reg);
6996
6997 num_to_shuffle--;
6998 }
6999
7000 write_reg--;
7001 }
7002
7003 /* Restore any registers we scribbled over. */
7004 for (write_reg = 0; clobbered != 0; write_reg++)
7005 {
7006 if ((clobbered & (1 << write_reg)) != 0)
7007 {
7008 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7009 CANNOT_WRITE_PC);
7010 displaced_debug_printf ("LDM: restored clobbered register r%d",
7011 write_reg);
7012 clobbered &= ~(1 << write_reg);
7013 }
7014 }
7015
7016 /* Perform register writeback manually. */
7017 if (dsc->u.block.writeback)
7018 {
7019 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7020
7021 if (dsc->u.block.increment)
7022 new_rn_val += regs_loaded * 4;
7023 else
7024 new_rn_val -= regs_loaded * 4;
7025
7026 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7027 CANNOT_WRITE_PC);
7028 }
7029 }
7030
7031 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7032 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7033
7034 static int
7035 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7036 struct regcache *regs,
7037 arm_displaced_step_copy_insn_closure *dsc)
7038 {
7039 int load = bit (insn, 20);
7040 int user = bit (insn, 22);
7041 int increment = bit (insn, 23);
7042 int before = bit (insn, 24);
7043 int writeback = bit (insn, 21);
7044 int rn = bits (insn, 16, 19);
7045
7046 /* Block transfers which don't mention PC can be run directly
7047 out-of-line. */
7048 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7049 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7050
7051 if (rn == ARM_PC_REGNUM)
7052 {
7053 warning (_("displaced: Unpredictable LDM or STM with "
7054 "base register r15"));
7055 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7056 }
7057
7058 displaced_debug_printf ("copying block transfer insn %.8lx",
7059 (unsigned long) insn);
7060
7061 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7062 dsc->u.block.rn = rn;
7063
7064 dsc->u.block.load = load;
7065 dsc->u.block.user = user;
7066 dsc->u.block.increment = increment;
7067 dsc->u.block.before = before;
7068 dsc->u.block.writeback = writeback;
7069 dsc->u.block.cond = bits (insn, 28, 31);
7070
7071 dsc->u.block.regmask = insn & 0xffff;
7072
7073 if (load)
7074 {
7075 if ((insn & 0xffff) == 0xffff)
7076 {
7077 /* LDM with a fully-populated register list. This case is
7078 particularly tricky. Implement for now by fully emulating the
7079 instruction (which might not behave perfectly in all cases, but
7080 these instructions should be rare enough for that not to matter
7081 too much). */
7082 dsc->modinsn[0] = ARM_NOP;
7083
7084 dsc->cleanup = &cleanup_block_load_all;
7085 }
7086 else
7087 {
7088 /* LDM of a list of registers which includes PC. Implement by
7089 rewriting the list of registers to be transferred into a
7090 contiguous chunk r0...rX before doing the transfer, then shuffling
7091 registers into the correct places in the cleanup routine. */
7092 unsigned int regmask = insn & 0xffff;
7093 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7094 unsigned int i;
7095
7096 for (i = 0; i < num_in_list; i++)
7097 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7098
7099 /* Writeback makes things complicated. We need to avoid clobbering
7100 the base register with one of the registers in our modified
7101 register list, but just using a different register can't work in
7102 all cases, e.g.:
7103
7104 ldm r14!, {r0-r13,pc}
7105
7106 which would need to be rewritten as:
7107
7108 ldm rN!, {r0-r14}
7109
7110 but that can't work, because there's no free register for N.
7111
7112 Solve this by turning off the writeback bit, and emulating
7113 writeback manually in the cleanup routine. */
7114
7115 if (writeback)
7116 insn &= ~(1 << 21);
7117
7118 new_regmask = (1 << num_in_list) - 1;
7119
7120 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7121 "%.4x, modified list %.4x",
7122 rn, writeback ? "!" : "",
7123 (int) insn & 0xffff, new_regmask);
7124
7125 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7126
7127 dsc->cleanup = &cleanup_block_load_pc;
7128 }
7129 }
7130 else
7131 {
7132 /* STM of a list of registers which includes PC. Run the instruction
7133 as-is, but out of line: this will store the wrong value for the PC,
7134 so we must manually fix up the memory in the cleanup routine.
7135 Doing things this way has the advantage that we can auto-detect
7136 the offset of the PC write (which is architecture-dependent) in
7137 the cleanup routine. */
7138 dsc->modinsn[0] = insn;
7139
7140 dsc->cleanup = &cleanup_block_store_pc;
7141 }
7142
7143 return 0;
7144 }
7145
7146 static int
7147 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7148 struct regcache *regs,
7149 arm_displaced_step_copy_insn_closure *dsc)
7150 {
7151 int rn = bits (insn1, 0, 3);
7152 int load = bit (insn1, 4);
7153 int writeback = bit (insn1, 5);
7154
7155 /* Block transfers which don't mention PC can be run directly
7156 out-of-line. */
7157 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7158 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7159
7160 if (rn == ARM_PC_REGNUM)
7161 {
7162 warning (_("displaced: Unpredictable LDM or STM with "
7163 "base register r15"));
7164 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7165 "unpredictable ldm/stm", dsc);
7166 }
7167
7168 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
7169 insn1, insn2);
7170
7171 /* Clear bit 13, since it should be always zero. */
7172 dsc->u.block.regmask = (insn2 & 0xdfff);
7173 dsc->u.block.rn = rn;
7174
7175 dsc->u.block.load = load;
7176 dsc->u.block.user = 0;
7177 dsc->u.block.increment = bit (insn1, 7);
7178 dsc->u.block.before = bit (insn1, 8);
7179 dsc->u.block.writeback = writeback;
7180 dsc->u.block.cond = INST_AL;
7181 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7182
7183 if (load)
7184 {
7185 if (dsc->u.block.regmask == 0xffff)
7186 {
7187 /* This branch is impossible to happen. */
7188 gdb_assert (0);
7189 }
7190 else
7191 {
7192 unsigned int regmask = dsc->u.block.regmask;
7193 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7194 unsigned int i;
7195
7196 for (i = 0; i < num_in_list; i++)
7197 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7198
7199 if (writeback)
7200 insn1 &= ~(1 << 5);
7201
7202 new_regmask = (1 << num_in_list) - 1;
7203
7204 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7205 "%.4x, modified list %.4x",
7206 rn, writeback ? "!" : "",
7207 (int) dsc->u.block.regmask, new_regmask);
7208
7209 dsc->modinsn[0] = insn1;
7210 dsc->modinsn[1] = (new_regmask & 0xffff);
7211 dsc->numinsns = 2;
7212
7213 dsc->cleanup = &cleanup_block_load_pc;
7214 }
7215 }
7216 else
7217 {
7218 dsc->modinsn[0] = insn1;
7219 dsc->modinsn[1] = insn2;
7220 dsc->numinsns = 2;
7221 dsc->cleanup = &cleanup_block_store_pc;
7222 }
7223 return 0;
7224 }
7225
7226 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
7227 This is used to avoid a dependency on BFD's bfd_endian enum. */
7228
7229 ULONGEST
7230 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
7231 int byte_order)
7232 {
7233 return read_memory_unsigned_integer (memaddr, len,
7234 (enum bfd_endian) byte_order);
7235 }
7236
7237 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
7238
7239 CORE_ADDR
7240 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
7241 CORE_ADDR val)
7242 {
7243 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
7244 }
7245
7246 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
7247
7248 static CORE_ADDR
7249 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
7250 {
7251 return 0;
7252 }
7253
7254 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
7255
7256 int
7257 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
7258 {
7259 return arm_is_thumb (self->regcache);
7260 }
7261
7262 /* single_step() is called just before we want to resume the inferior,
7263 if we want to single-step it but there is no hardware or kernel
7264 single-step support. We find the target of the coming instructions
7265 and breakpoint them. */
7266
7267 std::vector<CORE_ADDR>
7268 arm_software_single_step (struct regcache *regcache)
7269 {
7270 struct gdbarch *gdbarch = regcache->arch ();
7271 struct arm_get_next_pcs next_pcs_ctx;
7272
7273 arm_get_next_pcs_ctor (&next_pcs_ctx,
7274 &arm_get_next_pcs_ops,
7275 gdbarch_byte_order (gdbarch),
7276 gdbarch_byte_order_for_code (gdbarch),
7277 0,
7278 regcache);
7279
7280 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7281
7282 for (CORE_ADDR &pc_ref : next_pcs)
7283 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
7284
7285 return next_pcs;
7286 }
7287
7288 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7289 for Linux, where some SVC instructions must be treated specially. */
7290
7291 static void
7292 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7293 arm_displaced_step_copy_insn_closure *dsc)
7294 {
7295 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7296
7297 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
7298 (unsigned long) resume_addr);
7299
7300 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7301 }
7302
7303
7304 /* Common copy routine for svc instruction. */
7305
7306 static int
7307 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7308 arm_displaced_step_copy_insn_closure *dsc)
7309 {
7310 /* Preparation: none.
7311 Insn: unmodified svc.
7312 Cleanup: pc <- insn_addr + insn_size. */
7313
7314 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7315 instruction. */
7316 dsc->wrote_to_pc = 1;
7317
7318 /* Allow OS-specific code to override SVC handling. */
7319 if (dsc->u.svc.copy_svc_os)
7320 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7321 else
7322 {
7323 dsc->cleanup = &cleanup_svc;
7324 return 0;
7325 }
7326 }
7327
7328 static int
7329 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7330 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7331 {
7332
7333 displaced_debug_printf ("copying svc insn %.8lx",
7334 (unsigned long) insn);
7335
7336 dsc->modinsn[0] = insn;
7337
7338 return install_svc (gdbarch, regs, dsc);
7339 }
7340
7341 static int
7342 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7343 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7344 {
7345
7346 displaced_debug_printf ("copying svc insn %.4x", insn);
7347
7348 dsc->modinsn[0] = insn;
7349
7350 return install_svc (gdbarch, regs, dsc);
7351 }
7352
7353 /* Copy undefined instructions. */
7354
7355 static int
7356 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7357 arm_displaced_step_copy_insn_closure *dsc)
7358 {
7359 displaced_debug_printf ("copying undefined insn %.8lx",
7360 (unsigned long) insn);
7361
7362 dsc->modinsn[0] = insn;
7363
7364 return 0;
7365 }
7366
7367 static int
7368 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7369 arm_displaced_step_copy_insn_closure *dsc)
7370 {
7371
7372 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7373 (unsigned short) insn1, (unsigned short) insn2);
7374
7375 dsc->modinsn[0] = insn1;
7376 dsc->modinsn[1] = insn2;
7377 dsc->numinsns = 2;
7378
7379 return 0;
7380 }
7381
7382 /* Copy unpredictable instructions. */
7383
7384 static int
7385 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7386 arm_displaced_step_copy_insn_closure *dsc)
7387 {
7388 displaced_debug_printf ("copying unpredictable insn %.8lx",
7389 (unsigned long) insn);
7390
7391 dsc->modinsn[0] = insn;
7392
7393 return 0;
7394 }
7395
7396 /* The decode_* functions are instruction decoding helpers. They mostly follow
7397 the presentation in the ARM ARM. */
7398
7399 static int
7400 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7401 struct regcache *regs,
7402 arm_displaced_step_copy_insn_closure *dsc)
7403 {
7404 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7405 unsigned int rn = bits (insn, 16, 19);
7406
7407 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7408 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7409 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7410 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7411 else if ((op1 & 0x60) == 0x20)
7412 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7413 else if ((op1 & 0x71) == 0x40)
7414 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7415 dsc);
7416 else if ((op1 & 0x77) == 0x41)
7417 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7418 else if ((op1 & 0x77) == 0x45)
7419 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7420 else if ((op1 & 0x77) == 0x51)
7421 {
7422 if (rn != 0xf)
7423 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7424 else
7425 return arm_copy_unpred (gdbarch, insn, dsc);
7426 }
7427 else if ((op1 & 0x77) == 0x55)
7428 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7429 else if (op1 == 0x57)
7430 switch (op2)
7431 {
7432 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7433 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7434 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7435 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7436 default: return arm_copy_unpred (gdbarch, insn, dsc);
7437 }
7438 else if ((op1 & 0x63) == 0x43)
7439 return arm_copy_unpred (gdbarch, insn, dsc);
7440 else if ((op2 & 0x1) == 0x0)
7441 switch (op1 & ~0x80)
7442 {
7443 case 0x61:
7444 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7445 case 0x65:
7446 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7447 case 0x71: case 0x75:
7448 /* pld/pldw reg. */
7449 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7450 case 0x63: case 0x67: case 0x73: case 0x77:
7451 return arm_copy_unpred (gdbarch, insn, dsc);
7452 default:
7453 return arm_copy_undef (gdbarch, insn, dsc);
7454 }
7455 else
7456 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7457 }
7458
7459 static int
7460 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7461 struct regcache *regs,
7462 arm_displaced_step_copy_insn_closure *dsc)
7463 {
7464 if (bit (insn, 27) == 0)
7465 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7466 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7467 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7468 {
7469 case 0x0: case 0x2:
7470 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7471
7472 case 0x1: case 0x3:
7473 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7474
7475 case 0x4: case 0x5: case 0x6: case 0x7:
7476 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7477
7478 case 0x8:
7479 switch ((insn & 0xe00000) >> 21)
7480 {
7481 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7482 /* stc/stc2. */
7483 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7484
7485 case 0x2:
7486 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7487
7488 default:
7489 return arm_copy_undef (gdbarch, insn, dsc);
7490 }
7491
7492 case 0x9:
7493 {
7494 int rn_f = (bits (insn, 16, 19) == 0xf);
7495 switch ((insn & 0xe00000) >> 21)
7496 {
7497 case 0x1: case 0x3:
7498 /* ldc/ldc2 imm (undefined for rn == pc). */
7499 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7500 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7501
7502 case 0x2:
7503 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7504
7505 case 0x4: case 0x5: case 0x6: case 0x7:
7506 /* ldc/ldc2 lit (undefined for rn != pc). */
7507 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7508 : arm_copy_undef (gdbarch, insn, dsc);
7509
7510 default:
7511 return arm_copy_undef (gdbarch, insn, dsc);
7512 }
7513 }
7514
7515 case 0xa:
7516 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7517
7518 case 0xb:
7519 if (bits (insn, 16, 19) == 0xf)
7520 /* ldc/ldc2 lit. */
7521 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7522 else
7523 return arm_copy_undef (gdbarch, insn, dsc);
7524
7525 case 0xc:
7526 if (bit (insn, 4))
7527 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7528 else
7529 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7530
7531 case 0xd:
7532 if (bit (insn, 4))
7533 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7534 else
7535 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7536
7537 default:
7538 return arm_copy_undef (gdbarch, insn, dsc);
7539 }
7540 }
7541
7542 /* Decode miscellaneous instructions in dp/misc encoding space. */
7543
7544 static int
7545 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7546 struct regcache *regs,
7547 arm_displaced_step_copy_insn_closure *dsc)
7548 {
7549 unsigned int op2 = bits (insn, 4, 6);
7550 unsigned int op = bits (insn, 21, 22);
7551
7552 switch (op2)
7553 {
7554 case 0x0:
7555 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7556
7557 case 0x1:
7558 if (op == 0x1) /* bx. */
7559 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7560 else if (op == 0x3)
7561 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7562 else
7563 return arm_copy_undef (gdbarch, insn, dsc);
7564
7565 case 0x2:
7566 if (op == 0x1)
7567 /* Not really supported. */
7568 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7569 else
7570 return arm_copy_undef (gdbarch, insn, dsc);
7571
7572 case 0x3:
7573 if (op == 0x1)
7574 return arm_copy_bx_blx_reg (gdbarch, insn,
7575 regs, dsc); /* blx register. */
7576 else
7577 return arm_copy_undef (gdbarch, insn, dsc);
7578
7579 case 0x5:
7580 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7581
7582 case 0x7:
7583 if (op == 0x1)
7584 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7585 else if (op == 0x3)
7586 /* Not really supported. */
7587 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7588 /* Fall through. */
7589
7590 default:
7591 return arm_copy_undef (gdbarch, insn, dsc);
7592 }
7593 }
7594
7595 static int
7596 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7597 struct regcache *regs,
7598 arm_displaced_step_copy_insn_closure *dsc)
7599 {
7600 if (bit (insn, 25))
7601 switch (bits (insn, 20, 24))
7602 {
7603 case 0x10:
7604 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7605
7606 case 0x14:
7607 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7608
7609 case 0x12: case 0x16:
7610 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7611
7612 default:
7613 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7614 }
7615 else
7616 {
7617 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7618
7619 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7620 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7621 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7622 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7623 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7624 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7625 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7626 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7627 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7628 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7629 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7630 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7631 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7632 /* 2nd arg means "unprivileged". */
7633 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7634 dsc);
7635 }
7636
7637 /* Should be unreachable. */
7638 return 1;
7639 }
7640
7641 static int
7642 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7643 struct regcache *regs,
7644 arm_displaced_step_copy_insn_closure *dsc)
7645 {
7646 int a = bit (insn, 25), b = bit (insn, 4);
7647 uint32_t op1 = bits (insn, 20, 24);
7648
7649 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7650 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7651 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7652 else if ((!a && (op1 & 0x17) == 0x02)
7653 || (a && (op1 & 0x17) == 0x02 && !b))
7654 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7655 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7656 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7657 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7658 else if ((!a && (op1 & 0x17) == 0x03)
7659 || (a && (op1 & 0x17) == 0x03 && !b))
7660 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7661 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7662 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7663 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7664 else if ((!a && (op1 & 0x17) == 0x06)
7665 || (a && (op1 & 0x17) == 0x06 && !b))
7666 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7667 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7668 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7669 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7670 else if ((!a && (op1 & 0x17) == 0x07)
7671 || (a && (op1 & 0x17) == 0x07 && !b))
7672 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7673
7674 /* Should be unreachable. */
7675 return 1;
7676 }
7677
7678 static int
7679 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7680 arm_displaced_step_copy_insn_closure *dsc)
7681 {
7682 switch (bits (insn, 20, 24))
7683 {
7684 case 0x00: case 0x01: case 0x02: case 0x03:
7685 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7686
7687 case 0x04: case 0x05: case 0x06: case 0x07:
7688 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7689
7690 case 0x08: case 0x09: case 0x0a: case 0x0b:
7691 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7692 return arm_copy_unmodified (gdbarch, insn,
7693 "decode/pack/unpack/saturate/reverse", dsc);
7694
7695 case 0x18:
7696 if (bits (insn, 5, 7) == 0) /* op2. */
7697 {
7698 if (bits (insn, 12, 15) == 0xf)
7699 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7700 else
7701 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7702 }
7703 else
7704 return arm_copy_undef (gdbarch, insn, dsc);
7705
7706 case 0x1a: case 0x1b:
7707 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7708 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7709 else
7710 return arm_copy_undef (gdbarch, insn, dsc);
7711
7712 case 0x1c: case 0x1d:
7713 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7714 {
7715 if (bits (insn, 0, 3) == 0xf)
7716 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7717 else
7718 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7719 }
7720 else
7721 return arm_copy_undef (gdbarch, insn, dsc);
7722
7723 case 0x1e: case 0x1f:
7724 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7725 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7726 else
7727 return arm_copy_undef (gdbarch, insn, dsc);
7728 }
7729
7730 /* Should be unreachable. */
7731 return 1;
7732 }
7733
7734 static int
7735 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7736 struct regcache *regs,
7737 arm_displaced_step_copy_insn_closure *dsc)
7738 {
7739 if (bit (insn, 25))
7740 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7741 else
7742 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7743 }
7744
7745 static int
7746 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7747 struct regcache *regs,
7748 arm_displaced_step_copy_insn_closure *dsc)
7749 {
7750 unsigned int opcode = bits (insn, 20, 24);
7751
7752 switch (opcode)
7753 {
7754 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7755 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7756
7757 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7758 case 0x12: case 0x16:
7759 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7760
7761 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7762 case 0x13: case 0x17:
7763 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7764
7765 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7766 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7767 /* Note: no writeback for these instructions. Bit 25 will always be
7768 zero though (via caller), so the following works OK. */
7769 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7770 }
7771
7772 /* Should be unreachable. */
7773 return 1;
7774 }
7775
7776 /* Decode shifted register instructions. */
7777
7778 static int
7779 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7780 uint16_t insn2, struct regcache *regs,
7781 arm_displaced_step_copy_insn_closure *dsc)
7782 {
7783 /* PC is only allowed to be used in instruction MOV. */
7784
7785 unsigned int op = bits (insn1, 5, 8);
7786 unsigned int rn = bits (insn1, 0, 3);
7787
7788 if (op == 0x2 && rn == 0xf) /* MOV */
7789 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7790 else
7791 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7792 "dp (shift reg)", dsc);
7793 }
7794
7795
7796 /* Decode extension register load/store. Exactly the same as
7797 arm_decode_ext_reg_ld_st. */
7798
7799 static int
7800 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7801 uint16_t insn2, struct regcache *regs,
7802 arm_displaced_step_copy_insn_closure *dsc)
7803 {
7804 unsigned int opcode = bits (insn1, 4, 8);
7805
7806 switch (opcode)
7807 {
7808 case 0x04: case 0x05:
7809 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7810 "vfp/neon vmov", dsc);
7811
7812 case 0x08: case 0x0c: /* 01x00 */
7813 case 0x0a: case 0x0e: /* 01x10 */
7814 case 0x12: case 0x16: /* 10x10 */
7815 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7816 "vfp/neon vstm/vpush", dsc);
7817
7818 case 0x09: case 0x0d: /* 01x01 */
7819 case 0x0b: case 0x0f: /* 01x11 */
7820 case 0x13: case 0x17: /* 10x11 */
7821 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7822 "vfp/neon vldm/vpop", dsc);
7823
7824 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7825 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7826 "vstr", dsc);
7827 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7828 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7829 }
7830
7831 /* Should be unreachable. */
7832 return 1;
7833 }
7834
7835 static int
7836 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7837 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7838 {
7839 unsigned int op1 = bits (insn, 20, 25);
7840 int op = bit (insn, 4);
7841 unsigned int coproc = bits (insn, 8, 11);
7842
7843 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7844 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7845 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7846 && (coproc & 0xe) != 0xa)
7847 /* stc/stc2. */
7848 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7849 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7850 && (coproc & 0xe) != 0xa)
7851 /* ldc/ldc2 imm/lit. */
7852 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7853 else if ((op1 & 0x3e) == 0x00)
7854 return arm_copy_undef (gdbarch, insn, dsc);
7855 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7856 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7857 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7858 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7859 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7860 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7861 else if ((op1 & 0x30) == 0x20 && !op)
7862 {
7863 if ((coproc & 0xe) == 0xa)
7864 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7865 else
7866 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7867 }
7868 else if ((op1 & 0x30) == 0x20 && op)
7869 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7870 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7871 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7872 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7873 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7874 else if ((op1 & 0x30) == 0x30)
7875 return arm_copy_svc (gdbarch, insn, regs, dsc);
7876 else
7877 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7878 }
7879
7880 static int
7881 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7882 uint16_t insn2, struct regcache *regs,
7883 arm_displaced_step_copy_insn_closure *dsc)
7884 {
7885 unsigned int coproc = bits (insn2, 8, 11);
7886 unsigned int bit_5_8 = bits (insn1, 5, 8);
7887 unsigned int bit_9 = bit (insn1, 9);
7888 unsigned int bit_4 = bit (insn1, 4);
7889
7890 if (bit_9 == 0)
7891 {
7892 if (bit_5_8 == 2)
7893 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7894 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7895 dsc);
7896 else if (bit_5_8 == 0) /* UNDEFINED. */
7897 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7898 else
7899 {
7900 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7901 if ((coproc & 0xe) == 0xa)
7902 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7903 dsc);
7904 else /* coproc is not 101x. */
7905 {
7906 if (bit_4 == 0) /* STC/STC2. */
7907 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7908 "stc/stc2", dsc);
7909 else /* LDC/LDC2 {literal, immediate}. */
7910 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7911 regs, dsc);
7912 }
7913 }
7914 }
7915 else
7916 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7917
7918 return 0;
7919 }
7920
7921 static void
7922 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7923 arm_displaced_step_copy_insn_closure *dsc, int rd)
7924 {
7925 /* ADR Rd, #imm
7926
7927 Rewrite as:
7928
7929 Preparation: Rd <- PC
7930 Insn: ADD Rd, #imm
7931 Cleanup: Null.
7932 */
7933
7934 /* Rd <- PC */
7935 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7936 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7937 }
7938
7939 static int
7940 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7941 arm_displaced_step_copy_insn_closure *dsc,
7942 int rd, unsigned int imm)
7943 {
7944
7945 /* Encoding T2: ADDS Rd, #imm */
7946 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7947
7948 install_pc_relative (gdbarch, regs, dsc, rd);
7949
7950 return 0;
7951 }
7952
7953 static int
7954 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7955 struct regcache *regs,
7956 arm_displaced_step_copy_insn_closure *dsc)
7957 {
7958 unsigned int rd = bits (insn, 8, 10);
7959 unsigned int imm8 = bits (insn, 0, 7);
7960
7961 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7962 rd, imm8, insn);
7963
7964 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7965 }
7966
7967 static int
7968 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7969 uint16_t insn2, struct regcache *regs,
7970 arm_displaced_step_copy_insn_closure *dsc)
7971 {
7972 unsigned int rd = bits (insn2, 8, 11);
7973 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
7974 extract raw immediate encoding rather than computing immediate. When
7975 generating ADD or SUB instruction, we can simply perform OR operation to
7976 set immediate into ADD. */
7977 unsigned int imm_3_8 = insn2 & 0x70ff;
7978 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7979
7980 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7981 rd, imm_i, imm_3_8, insn1, insn2);
7982
7983 if (bit (insn1, 7)) /* Encoding T2 */
7984 {
7985 /* Encoding T3: SUB Rd, Rd, #imm */
7986 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7987 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7988 }
7989 else /* Encoding T3 */
7990 {
7991 /* Encoding T3: ADD Rd, Rd, #imm */
7992 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7993 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7994 }
7995 dsc->numinsns = 2;
7996
7997 install_pc_relative (gdbarch, regs, dsc, rd);
7998
7999 return 0;
8000 }
8001
8002 static int
8003 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
8004 struct regcache *regs,
8005 arm_displaced_step_copy_insn_closure *dsc)
8006 {
8007 unsigned int rt = bits (insn1, 8, 10);
8008 unsigned int pc;
8009 int imm8 = (bits (insn1, 0, 7) << 2);
8010
8011 /* LDR Rd, #imm8
8012
8013 Rwrite as:
8014
8015 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8016
8017 Insn: LDR R0, [R2, R3];
8018 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8019
8020 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
8021
8022 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8023 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8024 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8025 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8026 /* The assembler calculates the required value of the offset from the
8027 Align(PC,4) value of this instruction to the label. */
8028 pc = pc & 0xfffffffc;
8029
8030 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8031 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8032
8033 dsc->rd = rt;
8034 dsc->u.ldst.xfersize = 4;
8035 dsc->u.ldst.rn = 0;
8036 dsc->u.ldst.immed = 0;
8037 dsc->u.ldst.writeback = 0;
8038 dsc->u.ldst.restore_r4 = 0;
8039
8040 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8041
8042 dsc->cleanup = &cleanup_load;
8043
8044 return 0;
8045 }
8046
8047 /* Copy Thumb cbnz/cbz instruction. */
8048
8049 static int
8050 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8051 struct regcache *regs,
8052 arm_displaced_step_copy_insn_closure *dsc)
8053 {
8054 int non_zero = bit (insn1, 11);
8055 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8056 CORE_ADDR from = dsc->insn_addr;
8057 int rn = bits (insn1, 0, 2);
8058 int rn_val = displaced_read_reg (regs, dsc, rn);
8059
8060 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8061 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8062 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8063 condition is false, let it be, cleanup_branch will do nothing. */
8064 if (dsc->u.branch.cond)
8065 {
8066 dsc->u.branch.cond = INST_AL;
8067 dsc->u.branch.dest = from + 4 + imm5;
8068 }
8069 else
8070 dsc->u.branch.dest = from + 2;
8071
8072 dsc->u.branch.link = 0;
8073 dsc->u.branch.exchange = 0;
8074
8075 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8076 non_zero ? "cbnz" : "cbz",
8077 rn, rn_val, insn1, dsc->u.branch.dest);
8078
8079 dsc->modinsn[0] = THUMB_NOP;
8080
8081 dsc->cleanup = &cleanup_branch;
8082 return 0;
8083 }
8084
8085 /* Copy Table Branch Byte/Halfword */
8086 static int
8087 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8088 uint16_t insn2, struct regcache *regs,
8089 arm_displaced_step_copy_insn_closure *dsc)
8090 {
8091 ULONGEST rn_val, rm_val;
8092 int is_tbh = bit (insn2, 4);
8093 CORE_ADDR halfwords = 0;
8094 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8095
8096 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8097 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8098
8099 if (is_tbh)
8100 {
8101 gdb_byte buf[2];
8102
8103 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8104 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8105 }
8106 else
8107 {
8108 gdb_byte buf[1];
8109
8110 target_read_memory (rn_val + rm_val, buf, 1);
8111 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8112 }
8113
8114 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
8115 is_tbh ? "tbh" : "tbb",
8116 (unsigned int) rn_val, (unsigned int) rm_val,
8117 (unsigned int) halfwords);
8118
8119 dsc->u.branch.cond = INST_AL;
8120 dsc->u.branch.link = 0;
8121 dsc->u.branch.exchange = 0;
8122 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8123
8124 dsc->cleanup = &cleanup_branch;
8125
8126 return 0;
8127 }
8128
8129 static void
8130 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8131 arm_displaced_step_copy_insn_closure *dsc)
8132 {
8133 /* PC <- r7 */
8134 int val = displaced_read_reg (regs, dsc, 7);
8135 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8136
8137 /* r7 <- r8 */
8138 val = displaced_read_reg (regs, dsc, 8);
8139 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8140
8141 /* r8 <- tmp[0] */
8142 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8143
8144 }
8145
8146 static int
8147 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
8148 struct regcache *regs,
8149 arm_displaced_step_copy_insn_closure *dsc)
8150 {
8151 dsc->u.block.regmask = insn1 & 0x00ff;
8152
8153 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8154 to :
8155
8156 (1) register list is full, that is, r0-r7 are used.
8157 Prepare: tmp[0] <- r8
8158
8159 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8160 MOV r8, r7; Move value of r7 to r8;
8161 POP {r7}; Store PC value into r7.
8162
8163 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8164
8165 (2) register list is not full, supposing there are N registers in
8166 register list (except PC, 0 <= N <= 7).
8167 Prepare: for each i, 0 - N, tmp[i] <- ri.
8168
8169 POP {r0, r1, ...., rN};
8170
8171 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8172 from tmp[] properly.
8173 */
8174 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
8175 dsc->u.block.regmask, insn1);
8176
8177 if (dsc->u.block.regmask == 0xff)
8178 {
8179 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8180
8181 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8182 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8183 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8184
8185 dsc->numinsns = 3;
8186 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8187 }
8188 else
8189 {
8190 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
8191 unsigned int i;
8192 unsigned int new_regmask;
8193
8194 for (i = 0; i < num_in_list + 1; i++)
8195 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8196
8197 new_regmask = (1 << (num_in_list + 1)) - 1;
8198
8199 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
8200 "modified list %.4x",
8201 (int) dsc->u.block.regmask, new_regmask);
8202
8203 dsc->u.block.regmask |= 0x8000;
8204 dsc->u.block.writeback = 0;
8205 dsc->u.block.cond = INST_AL;
8206
8207 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8208
8209 dsc->cleanup = &cleanup_block_load_pc;
8210 }
8211
8212 return 0;
8213 }
8214
8215 static void
8216 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8217 struct regcache *regs,
8218 arm_displaced_step_copy_insn_closure *dsc)
8219 {
8220 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8221 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8222 int err = 0;
8223
8224 /* 16-bit thumb instructions. */
8225 switch (op_bit_12_15)
8226 {
8227 /* Shift (imme), add, subtract, move and compare. */
8228 case 0: case 1: case 2: case 3:
8229 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8230 "shift/add/sub/mov/cmp",
8231 dsc);
8232 break;
8233 case 4:
8234 switch (op_bit_10_11)
8235 {
8236 case 0: /* Data-processing */
8237 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8238 "data-processing",
8239 dsc);
8240 break;
8241 case 1: /* Special data instructions and branch and exchange. */
8242 {
8243 unsigned short op = bits (insn1, 7, 9);
8244 if (op == 6 || op == 7) /* BX or BLX */
8245 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8246 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8247 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8248 else
8249 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8250 dsc);
8251 }
8252 break;
8253 default: /* LDR (literal) */
8254 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8255 }
8256 break;
8257 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8258 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8259 break;
8260 case 10:
8261 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8262 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8263 else /* Generate SP-relative address */
8264 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8265 break;
8266 case 11: /* Misc 16-bit instructions */
8267 {
8268 switch (bits (insn1, 8, 11))
8269 {
8270 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8271 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8272 break;
8273 case 12: case 13: /* POP */
8274 if (bit (insn1, 8)) /* PC is in register list. */
8275 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8276 else
8277 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8278 break;
8279 case 15: /* If-Then, and hints */
8280 if (bits (insn1, 0, 3))
8281 /* If-Then makes up to four following instructions conditional.
8282 IT instruction itself is not conditional, so handle it as a
8283 common unmodified instruction. */
8284 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8285 dsc);
8286 else
8287 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8288 break;
8289 default:
8290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8291 }
8292 }
8293 break;
8294 case 12:
8295 if (op_bit_10_11 < 2) /* Store multiple registers */
8296 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8297 else /* Load multiple registers */
8298 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8299 break;
8300 case 13: /* Conditional branch and supervisor call */
8301 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8302 err = thumb_copy_b (gdbarch, insn1, dsc);
8303 else
8304 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8305 break;
8306 case 14: /* Unconditional branch */
8307 err = thumb_copy_b (gdbarch, insn1, dsc);
8308 break;
8309 default:
8310 err = 1;
8311 }
8312
8313 if (err)
8314 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error"));
8315 }
8316
8317 static int
8318 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8319 uint16_t insn1, uint16_t insn2,
8320 struct regcache *regs,
8321 arm_displaced_step_copy_insn_closure *dsc)
8322 {
8323 int rt = bits (insn2, 12, 15);
8324 int rn = bits (insn1, 0, 3);
8325 int op1 = bits (insn1, 7, 8);
8326
8327 switch (bits (insn1, 5, 6))
8328 {
8329 case 0: /* Load byte and memory hints */
8330 if (rt == 0xf) /* PLD/PLI */
8331 {
8332 if (rn == 0xf)
8333 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8334 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8335 else
8336 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8337 "pli/pld", dsc);
8338 }
8339 else
8340 {
8341 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8342 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8343 1);
8344 else
8345 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8346 "ldrb{reg, immediate}/ldrbt",
8347 dsc);
8348 }
8349
8350 break;
8351 case 1: /* Load halfword and memory hints. */
8352 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8353 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8354 "pld/unalloc memhint", dsc);
8355 else
8356 {
8357 if (rn == 0xf)
8358 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8359 2);
8360 else
8361 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8362 "ldrh/ldrht", dsc);
8363 }
8364 break;
8365 case 2: /* Load word */
8366 {
8367 int insn2_bit_8_11 = bits (insn2, 8, 11);
8368
8369 if (rn == 0xf)
8370 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8371 else if (op1 == 0x1) /* Encoding T3 */
8372 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8373 0, 1);
8374 else /* op1 == 0x0 */
8375 {
8376 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8377 /* LDR (immediate) */
8378 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8379 dsc, bit (insn2, 8), 1);
8380 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8381 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8382 "ldrt", dsc);
8383 else
8384 /* LDR (register) */
8385 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8386 dsc, 0, 0);
8387 }
8388 break;
8389 }
8390 default:
8391 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8392 break;
8393 }
8394 return 0;
8395 }
8396
8397 static void
8398 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8399 uint16_t insn2, struct regcache *regs,
8400 arm_displaced_step_copy_insn_closure *dsc)
8401 {
8402 int err = 0;
8403 unsigned short op = bit (insn2, 15);
8404 unsigned int op1 = bits (insn1, 11, 12);
8405
8406 switch (op1)
8407 {
8408 case 1:
8409 {
8410 switch (bits (insn1, 9, 10))
8411 {
8412 case 0:
8413 if (bit (insn1, 6))
8414 {
8415 /* Load/store {dual, exclusive}, table branch. */
8416 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8417 && bits (insn2, 5, 7) == 0)
8418 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8419 dsc);
8420 else
8421 /* PC is not allowed to use in load/store {dual, exclusive}
8422 instructions. */
8423 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8424 "load/store dual/ex", dsc);
8425 }
8426 else /* load/store multiple */
8427 {
8428 switch (bits (insn1, 7, 8))
8429 {
8430 case 0: case 3: /* SRS, RFE */
8431 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8432 "srs/rfe", dsc);
8433 break;
8434 case 1: case 2: /* LDM/STM/PUSH/POP */
8435 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8436 break;
8437 }
8438 }
8439 break;
8440
8441 case 1:
8442 /* Data-processing (shift register). */
8443 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8444 dsc);
8445 break;
8446 default: /* Coprocessor instructions. */
8447 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8448 break;
8449 }
8450 break;
8451 }
8452 case 2: /* op1 = 2 */
8453 if (op) /* Branch and misc control. */
8454 {
8455 if (bit (insn2, 14) /* BLX/BL */
8456 || bit (insn2, 12) /* Unconditional branch */
8457 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8458 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8459 else
8460 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8461 "misc ctrl", dsc);
8462 }
8463 else
8464 {
8465 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8466 {
8467 int dp_op = bits (insn1, 4, 8);
8468 int rn = bits (insn1, 0, 3);
8469 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8470 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8471 regs, dsc);
8472 else
8473 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8474 "dp/pb", dsc);
8475 }
8476 else /* Data processing (modified immediate) */
8477 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8478 "dp/mi", dsc);
8479 }
8480 break;
8481 case 3: /* op1 = 3 */
8482 switch (bits (insn1, 9, 10))
8483 {
8484 case 0:
8485 if (bit (insn1, 4))
8486 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8487 regs, dsc);
8488 else /* NEON Load/Store and Store single data item */
8489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8490 "neon elt/struct load/store",
8491 dsc);
8492 break;
8493 case 1: /* op1 = 3, bits (9, 10) == 1 */
8494 switch (bits (insn1, 7, 8))
8495 {
8496 case 0: case 1: /* Data processing (register) */
8497 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8498 "dp(reg)", dsc);
8499 break;
8500 case 2: /* Multiply and absolute difference */
8501 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8502 "mul/mua/diff", dsc);
8503 break;
8504 case 3: /* Long multiply and divide */
8505 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8506 "lmul/lmua", dsc);
8507 break;
8508 }
8509 break;
8510 default: /* Coprocessor instructions */
8511 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8512 break;
8513 }
8514 break;
8515 default:
8516 err = 1;
8517 }
8518
8519 if (err)
8520 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error"));
8521
8522 }
8523
8524 static void
8525 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8526 struct regcache *regs,
8527 arm_displaced_step_copy_insn_closure *dsc)
8528 {
8529 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8530 uint16_t insn1
8531 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8532
8533 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8534 insn1, (unsigned long) from);
8535
8536 dsc->is_thumb = 1;
8537 dsc->insn_size = thumb_insn_size (insn1);
8538 if (thumb_insn_size (insn1) == 4)
8539 {
8540 uint16_t insn2
8541 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8542 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8543 }
8544 else
8545 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8546 }
8547
8548 void
8549 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8550 CORE_ADDR to, struct regcache *regs,
8551 arm_displaced_step_copy_insn_closure *dsc)
8552 {
8553 int err = 0;
8554 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8555 uint32_t insn;
8556
8557 /* Most displaced instructions use a 1-instruction scratch space, so set this
8558 here and override below if/when necessary. */
8559 dsc->numinsns = 1;
8560 dsc->insn_addr = from;
8561 dsc->scratch_base = to;
8562 dsc->cleanup = NULL;
8563 dsc->wrote_to_pc = 0;
8564
8565 if (!displaced_in_arm_mode (regs))
8566 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8567
8568 dsc->is_thumb = 0;
8569 dsc->insn_size = 4;
8570 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8571 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8572 (unsigned long) insn, (unsigned long) from);
8573
8574 if ((insn & 0xf0000000) == 0xf0000000)
8575 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8576 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8577 {
8578 case 0x0: case 0x1: case 0x2: case 0x3:
8579 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8580 break;
8581
8582 case 0x4: case 0x5: case 0x6:
8583 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8584 break;
8585
8586 case 0x7:
8587 err = arm_decode_media (gdbarch, insn, dsc);
8588 break;
8589
8590 case 0x8: case 0x9: case 0xa: case 0xb:
8591 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8592 break;
8593
8594 case 0xc: case 0xd: case 0xe: case 0xf:
8595 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8596 break;
8597 }
8598
8599 if (err)
8600 internal_error (_("arm_process_displaced_insn: Instruction decode error"));
8601 }
8602
8603 /* Actually set up the scratch space for a displaced instruction. */
8604
8605 void
8606 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8607 CORE_ADDR to,
8608 arm_displaced_step_copy_insn_closure *dsc)
8609 {
8610 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8611 unsigned int i, len, offset;
8612 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8613 int size = dsc->is_thumb? 2 : 4;
8614 const gdb_byte *bkp_insn;
8615
8616 offset = 0;
8617 /* Poke modified instruction(s). */
8618 for (i = 0; i < dsc->numinsns; i++)
8619 {
8620 if (size == 4)
8621 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8622 dsc->modinsn[i], (unsigned long) to + offset);
8623 else if (size == 2)
8624 displaced_debug_printf ("writing insn %.4x at %.8lx",
8625 (unsigned short) dsc->modinsn[i],
8626 (unsigned long) to + offset);
8627
8628 write_memory_unsigned_integer (to + offset, size,
8629 byte_order_for_code,
8630 dsc->modinsn[i]);
8631 offset += size;
8632 }
8633
8634 /* Choose the correct breakpoint instruction. */
8635 if (dsc->is_thumb)
8636 {
8637 bkp_insn = tdep->thumb_breakpoint;
8638 len = tdep->thumb_breakpoint_size;
8639 }
8640 else
8641 {
8642 bkp_insn = tdep->arm_breakpoint;
8643 len = tdep->arm_breakpoint_size;
8644 }
8645
8646 /* Put breakpoint afterwards. */
8647 write_memory (to + offset, bkp_insn, len);
8648
8649 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8650 paddress (gdbarch, to));
8651 }
8652
8653 /* Entry point for cleaning things up after a displaced instruction has been
8654 single-stepped. */
8655
8656 void
8657 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8658 struct displaced_step_copy_insn_closure *dsc_,
8659 CORE_ADDR from, CORE_ADDR to,
8660 struct regcache *regs, bool completed_p)
8661 {
8662 /* The following block exists as a temporary measure while displaced
8663 stepping is fixed architecture at a time within GDB.
8664
8665 In an earlier implementation of displaced stepping, if GDB thought the
8666 displaced instruction had not been executed then this fix up function
8667 was never called. As a consequence, things that should be fixed by
8668 this function were left in an unfixed state.
8669
8670 However, it's not as simple as always calling this function; this
8671 function needs to be updated to decide what should be fixed up based
8672 on whether the displaced step executed or not, which requires each
8673 architecture to be considered individually.
8674
8675 Until this architecture is updated, this block replicates the old
8676 behaviour; we just restore the program counter register, and leave
8677 everything else unfixed. */
8678 if (!completed_p)
8679 {
8680 CORE_ADDR pc = regcache_read_pc (regs);
8681 pc = from + (pc - to);
8682 regcache_write_pc (regs, pc);
8683 return;
8684 }
8685
8686 arm_displaced_step_copy_insn_closure *dsc
8687 = (arm_displaced_step_copy_insn_closure *) dsc_;
8688
8689 if (dsc->cleanup)
8690 dsc->cleanup (gdbarch, regs, dsc);
8691
8692 if (!dsc->wrote_to_pc)
8693 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8694 dsc->insn_addr + dsc->insn_size);
8695
8696 }
8697
8698 #include "bfd-in2.h"
8699 #include "libcoff.h"
8700
8701 static int
8702 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8703 {
8704 gdb_disassemble_info *di
8705 = static_cast<gdb_disassemble_info *> (info->application_data);
8706 struct gdbarch *gdbarch = di->arch ();
8707
8708 if (arm_pc_is_thumb (gdbarch, memaddr))
8709 {
8710 static asymbol *asym;
8711 static combined_entry_type ce;
8712 static struct coff_symbol_struct csym;
8713 static struct bfd fake_bfd;
8714 static bfd_target fake_target;
8715
8716 if (csym.native == NULL)
8717 {
8718 /* Create a fake symbol vector containing a Thumb symbol.
8719 This is solely so that the code in print_insn_little_arm()
8720 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8721 the presence of a Thumb symbol and switch to decoding
8722 Thumb instructions. */
8723
8724 fake_target.flavour = bfd_target_coff_flavour;
8725 fake_bfd.xvec = &fake_target;
8726 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8727 csym.native = &ce;
8728 csym.symbol.the_bfd = &fake_bfd;
8729 csym.symbol.name = "fake";
8730 asym = (asymbol *) & csym;
8731 }
8732
8733 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8734 info->symbols = &asym;
8735 }
8736 else
8737 info->symbols = NULL;
8738
8739 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
8740 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
8741 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
8742 the assert on the mismatch of info->mach and
8743 bfd_get_mach (current_program_space->exec_bfd ()) in
8744 default_print_insn. */
8745 if (current_program_space->exec_bfd () != NULL
8746 && (current_program_space->exec_bfd ()->arch_info
8747 == gdbarch_bfd_arch_info (gdbarch)))
8748 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8749
8750 return default_print_insn (memaddr, info);
8751 }
8752
8753 /* The following define instruction sequences that will cause ARM
8754 cpu's to take an undefined instruction trap. These are used to
8755 signal a breakpoint to GDB.
8756
8757 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8758 modes. A different instruction is required for each mode. The ARM
8759 cpu's can also be big or little endian. Thus four different
8760 instructions are needed to support all cases.
8761
8762 Note: ARMv4 defines several new instructions that will take the
8763 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8764 not in fact add the new instructions. The new undefined
8765 instructions in ARMv4 are all instructions that had no defined
8766 behaviour in earlier chips. There is no guarantee that they will
8767 raise an exception, but may be treated as NOP's. In practice, it
8768 may only safe to rely on instructions matching:
8769
8770 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8771 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8772 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8773
8774 Even this may only true if the condition predicate is true. The
8775 following use a condition predicate of ALWAYS so it is always TRUE.
8776
8777 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8778 and NetBSD all use a software interrupt rather than an undefined
8779 instruction to force a trap. This can be handled by by the
8780 abi-specific code during establishment of the gdbarch vector. */
8781
8782 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8783 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8784 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8785 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8786
8787 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8788 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8789 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8790 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8791
8792 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8793
8794 static int
8795 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8796 {
8797 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8798 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8799
8800 if (arm_pc_is_thumb (gdbarch, *pcptr))
8801 {
8802 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8803
8804 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8805 check whether we are replacing a 32-bit instruction. */
8806 if (tdep->thumb2_breakpoint != NULL)
8807 {
8808 gdb_byte buf[2];
8809
8810 if (target_read_memory (*pcptr, buf, 2) == 0)
8811 {
8812 unsigned short inst1;
8813
8814 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8815 if (thumb_insn_size (inst1) == 4)
8816 return ARM_BP_KIND_THUMB2;
8817 }
8818 }
8819
8820 return ARM_BP_KIND_THUMB;
8821 }
8822 else
8823 return ARM_BP_KIND_ARM;
8824
8825 }
8826
8827 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8828
8829 static const gdb_byte *
8830 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8831 {
8832 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8833
8834 switch (kind)
8835 {
8836 case ARM_BP_KIND_ARM:
8837 *size = tdep->arm_breakpoint_size;
8838 return tdep->arm_breakpoint;
8839 case ARM_BP_KIND_THUMB:
8840 *size = tdep->thumb_breakpoint_size;
8841 return tdep->thumb_breakpoint;
8842 case ARM_BP_KIND_THUMB2:
8843 *size = tdep->thumb2_breakpoint_size;
8844 return tdep->thumb2_breakpoint;
8845 default:
8846 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8847 }
8848 }
8849
8850 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8851
8852 static int
8853 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8854 struct regcache *regcache,
8855 CORE_ADDR *pcptr)
8856 {
8857 gdb_byte buf[4];
8858
8859 /* Check the memory pointed by PC is readable. */
8860 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8861 {
8862 struct arm_get_next_pcs next_pcs_ctx;
8863
8864 arm_get_next_pcs_ctor (&next_pcs_ctx,
8865 &arm_get_next_pcs_ops,
8866 gdbarch_byte_order (gdbarch),
8867 gdbarch_byte_order_for_code (gdbarch),
8868 0,
8869 regcache);
8870
8871 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8872
8873 /* If MEMADDR is the next instruction of current pc, do the
8874 software single step computation, and get the thumb mode by
8875 the destination address. */
8876 for (CORE_ADDR pc : next_pcs)
8877 {
8878 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8879 {
8880 if (IS_THUMB_ADDR (pc))
8881 {
8882 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8883 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8884 }
8885 else
8886 return ARM_BP_KIND_ARM;
8887 }
8888 }
8889 }
8890
8891 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8892 }
8893
8894 /* Extract from an array REGBUF containing the (raw) register state a
8895 function return value of type TYPE, and copy that, in virtual
8896 format, into VALBUF. */
8897
8898 static void
8899 arm_extract_return_value (struct type *type, struct regcache *regs,
8900 gdb_byte *valbuf)
8901 {
8902 struct gdbarch *gdbarch = regs->arch ();
8903 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8904 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8905
8906 if (TYPE_CODE_FLT == type->code ())
8907 {
8908 switch (tdep->fp_model)
8909 {
8910 case ARM_FLOAT_FPA:
8911 {
8912 /* The value is in register F0 in internal format. We need to
8913 extract the raw value and then convert it to the desired
8914 internal type. */
8915 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8916
8917 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8918 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8919 valbuf, type);
8920 }
8921 break;
8922
8923 case ARM_FLOAT_SOFT_FPA:
8924 case ARM_FLOAT_SOFT_VFP:
8925 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8926 not using the VFP ABI code. */
8927 case ARM_FLOAT_VFP:
8928 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8929 if (type->length () > 4)
8930 regs->cooked_read (ARM_A1_REGNUM + 1,
8931 valbuf + ARM_INT_REGISTER_SIZE);
8932 break;
8933
8934 default:
8935 internal_error (_("arm_extract_return_value: "
8936 "Floating point model not supported"));
8937 break;
8938 }
8939 }
8940 else if (type->code () == TYPE_CODE_INT
8941 || type->code () == TYPE_CODE_CHAR
8942 || type->code () == TYPE_CODE_BOOL
8943 || type->code () == TYPE_CODE_PTR
8944 || TYPE_IS_REFERENCE (type)
8945 || type->code () == TYPE_CODE_ENUM
8946 || is_fixed_point_type (type))
8947 {
8948 /* If the type is a plain integer, then the access is
8949 straight-forward. Otherwise we have to play around a bit
8950 more. */
8951 int len = type->length ();
8952 int regno = ARM_A1_REGNUM;
8953 ULONGEST tmp;
8954
8955 while (len > 0)
8956 {
8957 /* By using store_unsigned_integer we avoid having to do
8958 anything special for small big-endian values. */
8959 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8960 store_unsigned_integer (valbuf,
8961 (len > ARM_INT_REGISTER_SIZE
8962 ? ARM_INT_REGISTER_SIZE : len),
8963 byte_order, tmp);
8964 len -= ARM_INT_REGISTER_SIZE;
8965 valbuf += ARM_INT_REGISTER_SIZE;
8966 }
8967 }
8968 else
8969 {
8970 /* For a structure or union the behaviour is as if the value had
8971 been stored to word-aligned memory and then loaded into
8972 registers with 32-bit load instruction(s). */
8973 int len = type->length ();
8974 int regno = ARM_A1_REGNUM;
8975 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8976
8977 while (len > 0)
8978 {
8979 regs->cooked_read (regno++, tmpbuf);
8980 memcpy (valbuf, tmpbuf,
8981 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8982 len -= ARM_INT_REGISTER_SIZE;
8983 valbuf += ARM_INT_REGISTER_SIZE;
8984 }
8985 }
8986 }
8987
8988
8989 /* Will a function return an aggregate type in memory or in a
8990 register? Return 0 if an aggregate type can be returned in a
8991 register, 1 if it must be returned in memory. */
8992
8993 static int
8994 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8995 {
8996 enum type_code code;
8997
8998 type = check_typedef (type);
8999
9000 /* Simple, non-aggregate types (ie not including vectors and
9001 complex) are always returned in a register (or registers). */
9002 code = type->code ();
9003 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
9004 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
9005 return 0;
9006
9007 if (TYPE_HAS_DYNAMIC_LENGTH (type))
9008 return 1;
9009
9010 if (TYPE_CODE_ARRAY == code && type->is_vector ())
9011 {
9012 /* Vector values should be returned using ARM registers if they
9013 are not over 16 bytes. */
9014 return (type->length () > 16);
9015 }
9016
9017 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9018 if (tdep->arm_abi != ARM_ABI_APCS)
9019 {
9020 /* The AAPCS says all aggregates not larger than a word are returned
9021 in a register. */
9022 if (type->length () <= ARM_INT_REGISTER_SIZE
9023 && language_pass_by_reference (type).trivially_copyable)
9024 return 0;
9025
9026 return 1;
9027 }
9028 else
9029 {
9030 int nRc;
9031
9032 /* All aggregate types that won't fit in a register must be returned
9033 in memory. */
9034 if (type->length () > ARM_INT_REGISTER_SIZE
9035 || !language_pass_by_reference (type).trivially_copyable)
9036 return 1;
9037
9038 /* In the ARM ABI, "integer" like aggregate types are returned in
9039 registers. For an aggregate type to be integer like, its size
9040 must be less than or equal to ARM_INT_REGISTER_SIZE and the
9041 offset of each addressable subfield must be zero. Note that bit
9042 fields are not addressable, and all addressable subfields of
9043 unions always start at offset zero.
9044
9045 This function is based on the behaviour of GCC 2.95.1.
9046 See: gcc/arm.c: arm_return_in_memory() for details.
9047
9048 Note: All versions of GCC before GCC 2.95.2 do not set up the
9049 parameters correctly for a function returning the following
9050 structure: struct { float f;}; This should be returned in memory,
9051 not a register. Richard Earnshaw sent me a patch, but I do not
9052 know of any way to detect if a function like the above has been
9053 compiled with the correct calling convention. */
9054
9055 /* Assume all other aggregate types can be returned in a register.
9056 Run a check for structures, unions and arrays. */
9057 nRc = 0;
9058
9059 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9060 {
9061 int i;
9062 /* Need to check if this struct/union is "integer" like. For
9063 this to be true, its size must be less than or equal to
9064 ARM_INT_REGISTER_SIZE and the offset of each addressable
9065 subfield must be zero. Note that bit fields are not
9066 addressable, and unions always start at offset zero. If any
9067 of the subfields is a floating point type, the struct/union
9068 cannot be an integer type. */
9069
9070 /* For each field in the object, check:
9071 1) Is it FP? --> yes, nRc = 1;
9072 2) Is it addressable (bitpos != 0) and
9073 not packed (bitsize == 0)?
9074 --> yes, nRc = 1
9075 */
9076
9077 for (i = 0; i < type->num_fields (); i++)
9078 {
9079 enum type_code field_type_code;
9080
9081 field_type_code
9082 = check_typedef (type->field (i).type ())->code ();
9083
9084 /* Is it a floating point type field? */
9085 if (field_type_code == TYPE_CODE_FLT)
9086 {
9087 nRc = 1;
9088 break;
9089 }
9090
9091 /* If bitpos != 0, then we have to care about it. */
9092 if (type->field (i).loc_bitpos () != 0)
9093 {
9094 /* Bitfields are not addressable. If the field bitsize is
9095 zero, then the field is not packed. Hence it cannot be
9096 a bitfield or any other packed type. */
9097 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9098 {
9099 nRc = 1;
9100 break;
9101 }
9102 }
9103 }
9104 }
9105
9106 return nRc;
9107 }
9108 }
9109
9110 /* Write into appropriate registers a function return value of type
9111 TYPE, given in virtual format. */
9112
9113 static void
9114 arm_store_return_value (struct type *type, struct regcache *regs,
9115 const gdb_byte *valbuf)
9116 {
9117 struct gdbarch *gdbarch = regs->arch ();
9118 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9119
9120 if (type->code () == TYPE_CODE_FLT)
9121 {
9122 gdb_byte buf[ARM_FP_REGISTER_SIZE];
9123 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9124
9125 switch (tdep->fp_model)
9126 {
9127 case ARM_FLOAT_FPA:
9128
9129 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
9130 regs->cooked_write (ARM_F0_REGNUM, buf);
9131 break;
9132
9133 case ARM_FLOAT_SOFT_FPA:
9134 case ARM_FLOAT_SOFT_VFP:
9135 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9136 not using the VFP ABI code. */
9137 case ARM_FLOAT_VFP:
9138 regs->cooked_write (ARM_A1_REGNUM, valbuf);
9139 if (type->length () > 4)
9140 regs->cooked_write (ARM_A1_REGNUM + 1,
9141 valbuf + ARM_INT_REGISTER_SIZE);
9142 break;
9143
9144 default:
9145 internal_error (_("arm_store_return_value: Floating "
9146 "point model not supported"));
9147 break;
9148 }
9149 }
9150 else if (type->code () == TYPE_CODE_INT
9151 || type->code () == TYPE_CODE_CHAR
9152 || type->code () == TYPE_CODE_BOOL
9153 || type->code () == TYPE_CODE_PTR
9154 || TYPE_IS_REFERENCE (type)
9155 || type->code () == TYPE_CODE_ENUM)
9156 {
9157 if (type->length () <= 4)
9158 {
9159 /* Values of one word or less are zero/sign-extended and
9160 returned in r0. */
9161 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9162 LONGEST val = unpack_long (type, valbuf);
9163
9164 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
9165 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
9166 }
9167 else
9168 {
9169 /* Integral values greater than one word are stored in consecutive
9170 registers starting with r0. This will always be a multiple of
9171 the regiser size. */
9172 int len = type->length ();
9173 int regno = ARM_A1_REGNUM;
9174
9175 while (len > 0)
9176 {
9177 regs->cooked_write (regno++, valbuf);
9178 len -= ARM_INT_REGISTER_SIZE;
9179 valbuf += ARM_INT_REGISTER_SIZE;
9180 }
9181 }
9182 }
9183 else
9184 {
9185 /* For a structure or union the behaviour is as if the value had
9186 been stored to word-aligned memory and then loaded into
9187 registers with 32-bit load instruction(s). */
9188 int len = type->length ();
9189 int regno = ARM_A1_REGNUM;
9190 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9191
9192 while (len > 0)
9193 {
9194 memcpy (tmpbuf, valbuf,
9195 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9196 regs->cooked_write (regno++, tmpbuf);
9197 len -= ARM_INT_REGISTER_SIZE;
9198 valbuf += ARM_INT_REGISTER_SIZE;
9199 }
9200 }
9201 }
9202
9203
9204 /* Handle function return values. */
9205
9206 static enum return_value_convention
9207 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9208 struct type *valtype, struct regcache *regcache,
9209 struct value **read_value, const gdb_byte *writebuf)
9210 {
9211 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9212 struct type *func_type = function ? function->type () : NULL;
9213 enum arm_vfp_cprc_base_type vfp_base_type;
9214 int vfp_base_count;
9215
9216 if (arm_vfp_abi_for_function (gdbarch, func_type)
9217 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9218 {
9219 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9220 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9221 int i;
9222
9223 gdb_byte *readbuf = nullptr;
9224 if (read_value != nullptr)
9225 {
9226 *read_value = value::allocate (valtype);
9227 readbuf = (*read_value)->contents_raw ().data ();
9228 }
9229
9230 for (i = 0; i < vfp_base_count; i++)
9231 {
9232 if (reg_char == 'q')
9233 {
9234 if (writebuf)
9235 arm_neon_quad_write (gdbarch, regcache, i,
9236 writebuf + i * unit_length);
9237
9238 if (readbuf)
9239 arm_neon_quad_read (gdbarch, regcache, i,
9240 readbuf + i * unit_length);
9241 }
9242 else
9243 {
9244 char name_buf[4];
9245 int regnum;
9246
9247 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9248 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9249 strlen (name_buf));
9250 if (writebuf)
9251 regcache->cooked_write (regnum, writebuf + i * unit_length);
9252 if (readbuf)
9253 regcache->cooked_read (regnum, readbuf + i * unit_length);
9254 }
9255 }
9256 return RETURN_VALUE_REGISTER_CONVENTION;
9257 }
9258
9259 if (valtype->code () == TYPE_CODE_STRUCT
9260 || valtype->code () == TYPE_CODE_UNION
9261 || valtype->code () == TYPE_CODE_ARRAY)
9262 {
9263 /* From the AAPCS document:
9264
9265 Result return:
9266
9267 A Composite Type larger than 4 bytes, or whose size cannot be
9268 determined statically by both caller and callee, is stored in memory
9269 at an address passed as an extra argument when the function was
9270 called (Parameter Passing, rule A.4). The memory to be used for the
9271 result may be modified at any point during the function call.
9272
9273 Parameter Passing:
9274
9275 A.4: If the subroutine is a function that returns a result in memory,
9276 then the address for the result is placed in r0 and the NCRN is set
9277 to r1. */
9278 if (tdep->struct_return == pcc_struct_return
9279 || arm_return_in_memory (gdbarch, valtype))
9280 {
9281 if (read_value != nullptr)
9282 {
9283 CORE_ADDR addr;
9284
9285 regcache->cooked_read (ARM_A1_REGNUM, &addr);
9286 *read_value = value_at_non_lval (valtype, addr);
9287 }
9288 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
9289 }
9290 }
9291 else if (valtype->code () == TYPE_CODE_COMPLEX)
9292 {
9293 if (arm_return_in_memory (gdbarch, valtype))
9294 return RETURN_VALUE_STRUCT_CONVENTION;
9295 }
9296
9297 if (writebuf)
9298 arm_store_return_value (valtype, regcache, writebuf);
9299
9300 if (read_value != nullptr)
9301 {
9302 *read_value = value::allocate (valtype);
9303 gdb_byte *readbuf = (*read_value)->contents_raw ().data ();
9304 arm_extract_return_value (valtype, regcache, readbuf);
9305 }
9306
9307 return RETURN_VALUE_REGISTER_CONVENTION;
9308 }
9309
9310
9311 static int
9312 arm_get_longjmp_target (frame_info_ptr frame, CORE_ADDR *pc)
9313 {
9314 struct gdbarch *gdbarch = get_frame_arch (frame);
9315 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9316 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9317 CORE_ADDR jb_addr;
9318 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9319
9320 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9321
9322 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9323 ARM_INT_REGISTER_SIZE))
9324 return 0;
9325
9326 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9327 return 1;
9328 }
9329 /* A call to cmse secure entry function "foo" at "a" is modified by
9330 GNU ld as "b".
9331 a) bl xxxx <foo>
9332
9333 <foo>
9334 xxxx:
9335
9336 b) bl yyyy <__acle_se_foo>
9337
9338 section .gnu.sgstubs:
9339 <foo>
9340 yyyy: sg // secure gateway
9341 b.w xxxx <__acle_se_foo> // original_branch_dest
9342
9343 <__acle_se_foo>
9344 xxxx:
9345
9346 When the control at "b", the pc contains "yyyy" (sg address) which is a
9347 trampoline and does not exist in source code. This function returns the
9348 target pc "xxxx". For more details please refer to section 5.4
9349 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9350 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9351 document on www.developer.arm.com. */
9352
9353 static CORE_ADDR
9354 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9355 {
9356 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9357 char *target_name = (char *) alloca (target_len);
9358 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9359
9360 struct bound_minimal_symbol minsym
9361 = lookup_minimal_symbol (target_name, NULL, objfile);
9362
9363 if (minsym.minsym != nullptr)
9364 return minsym.value_address ();
9365
9366 return 0;
9367 }
9368
9369 /* Return true when SEC points to ".gnu.sgstubs" section. */
9370
9371 static bool
9372 arm_is_sgstubs_section (struct obj_section *sec)
9373 {
9374 return (sec != nullptr
9375 && sec->the_bfd_section != nullptr
9376 && sec->the_bfd_section->name != nullptr
9377 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9378 }
9379
9380 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9381 return the target PC. Otherwise return 0. */
9382
9383 CORE_ADDR
9384 arm_skip_stub (frame_info_ptr frame, CORE_ADDR pc)
9385 {
9386 const char *name;
9387 int namelen;
9388 CORE_ADDR start_addr;
9389
9390 /* Find the starting address and name of the function containing the PC. */
9391 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9392 {
9393 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9394 check here. */
9395 start_addr = arm_skip_bx_reg (frame, pc);
9396 if (start_addr != 0)
9397 return start_addr;
9398
9399 return 0;
9400 }
9401
9402 /* If PC is in a Thumb call or return stub, return the address of the
9403 target PC, which is in a register. The thunk functions are called
9404 _call_via_xx, where x is the register name. The possible names
9405 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9406 functions, named __ARM_call_via_r[0-7]. */
9407 if (startswith (name, "_call_via_")
9408 || startswith (name, "__ARM_call_via_"))
9409 {
9410 /* Use the name suffix to determine which register contains the
9411 target PC. */
9412 static const char *table[15] =
9413 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9414 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9415 };
9416 int regno;
9417 int offset = strlen (name) - 2;
9418
9419 for (regno = 0; regno <= 14; regno++)
9420 if (strcmp (&name[offset], table[regno]) == 0)
9421 return get_frame_register_unsigned (frame, regno);
9422 }
9423
9424 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9425 non-interworking calls to foo. We could decode the stubs
9426 to find the target but it's easier to use the symbol table. */
9427 namelen = strlen (name);
9428 if (name[0] == '_' && name[1] == '_'
9429 && ((namelen > 2 + strlen ("_from_thumb")
9430 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9431 || (namelen > 2 + strlen ("_from_arm")
9432 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9433 {
9434 char *target_name;
9435 int target_len = namelen - 2;
9436 struct bound_minimal_symbol minsym;
9437 struct objfile *objfile;
9438 struct obj_section *sec;
9439
9440 if (name[namelen - 1] == 'b')
9441 target_len -= strlen ("_from_thumb");
9442 else
9443 target_len -= strlen ("_from_arm");
9444
9445 target_name = (char *) alloca (target_len + 1);
9446 memcpy (target_name, name + 2, target_len);
9447 target_name[target_len] = '\0';
9448
9449 sec = find_pc_section (pc);
9450 objfile = (sec == NULL) ? NULL : sec->objfile;
9451 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9452 if (minsym.minsym != NULL)
9453 return minsym.value_address ();
9454 else
9455 return 0;
9456 }
9457
9458 struct obj_section *section = find_pc_section (pc);
9459
9460 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9461 if (arm_is_sgstubs_section (section))
9462 return arm_skip_cmse_entry (pc, name, section->objfile);
9463
9464 return 0; /* not a stub */
9465 }
9466
9467 static void
9468 arm_update_current_architecture (void)
9469 {
9470 /* If the current architecture is not ARM, we have nothing to do. */
9471 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9472 return;
9473
9474 /* Update the architecture. */
9475 gdbarch_info info;
9476 if (!gdbarch_update_p (info))
9477 internal_error (_("could not update architecture"));
9478 }
9479
9480 static void
9481 set_fp_model_sfunc (const char *args, int from_tty,
9482 struct cmd_list_element *c)
9483 {
9484 int fp_model;
9485
9486 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9487 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9488 {
9489 arm_fp_model = (enum arm_float_model) fp_model;
9490 break;
9491 }
9492
9493 if (fp_model == ARM_FLOAT_LAST)
9494 internal_error (_("Invalid fp model accepted: %s."),
9495 current_fp_model);
9496
9497 arm_update_current_architecture ();
9498 }
9499
9500 static void
9501 show_fp_model (struct ui_file *file, int from_tty,
9502 struct cmd_list_element *c, const char *value)
9503 {
9504 if (arm_fp_model == ARM_FLOAT_AUTO
9505 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9506 {
9507 arm_gdbarch_tdep *tdep
9508 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9509
9510 gdb_printf (file, _("\
9511 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9512 fp_model_strings[tdep->fp_model]);
9513 }
9514 else
9515 gdb_printf (file, _("\
9516 The current ARM floating point model is \"%s\".\n"),
9517 fp_model_strings[arm_fp_model]);
9518 }
9519
9520 static void
9521 arm_set_abi (const char *args, int from_tty,
9522 struct cmd_list_element *c)
9523 {
9524 int arm_abi;
9525
9526 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9527 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9528 {
9529 arm_abi_global = (enum arm_abi_kind) arm_abi;
9530 break;
9531 }
9532
9533 if (arm_abi == ARM_ABI_LAST)
9534 internal_error (_("Invalid ABI accepted: %s."),
9535 arm_abi_string);
9536
9537 arm_update_current_architecture ();
9538 }
9539
9540 static void
9541 arm_show_abi (struct ui_file *file, int from_tty,
9542 struct cmd_list_element *c, const char *value)
9543 {
9544 if (arm_abi_global == ARM_ABI_AUTO
9545 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9546 {
9547 arm_gdbarch_tdep *tdep
9548 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9549
9550 gdb_printf (file, _("\
9551 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9552 arm_abi_strings[tdep->arm_abi]);
9553 }
9554 else
9555 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9556 arm_abi_string);
9557 }
9558
9559 static void
9560 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9561 struct cmd_list_element *c, const char *value)
9562 {
9563 gdb_printf (file,
9564 _("The current execution mode assumed "
9565 "(when symbols are unavailable) is \"%s\".\n"),
9566 arm_fallback_mode_string);
9567 }
9568
9569 static void
9570 arm_show_force_mode (struct ui_file *file, int from_tty,
9571 struct cmd_list_element *c, const char *value)
9572 {
9573 gdb_printf (file,
9574 _("The current execution mode assumed "
9575 "(even when symbols are available) is \"%s\".\n"),
9576 arm_force_mode_string);
9577 }
9578
9579 static void
9580 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9581 struct cmd_list_element *c, const char *value)
9582 {
9583 gdb_printf (file,
9584 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9585 arm_unwind_secure_frames ? "on" : "off");
9586 }
9587
9588 /* If the user changes the register disassembly style used for info
9589 register and other commands, we have to also switch the style used
9590 in opcodes for disassembly output. This function is run in the "set
9591 arm disassembly" command, and does that. */
9592
9593 static void
9594 set_disassembly_style_sfunc (const char *args, int from_tty,
9595 struct cmd_list_element *c)
9596 {
9597 /* Convert the short style name into the long style name (eg, reg-names-*)
9598 before calling the generic set_disassembler_options() function. */
9599 std::string long_name = std::string ("reg-names-") + disassembly_style;
9600 set_disassembler_options (&long_name[0]);
9601 }
9602
9603 static void
9604 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9605 struct cmd_list_element *c, const char *value)
9606 {
9607 struct gdbarch *gdbarch = get_current_arch ();
9608 char *options = get_disassembler_options (gdbarch);
9609 const char *style = "";
9610 int len = 0;
9611 const char *opt;
9612
9613 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9614 if (startswith (opt, "reg-names-"))
9615 {
9616 style = &opt[strlen ("reg-names-")];
9617 len = strcspn (style, ",");
9618 }
9619
9620 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9621 }
9622 \f
9623 /* Return the ARM register name corresponding to register I. */
9624 static const char *
9625 arm_register_name (struct gdbarch *gdbarch, int i)
9626 {
9627 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9628
9629 if (is_s_pseudo (gdbarch, i))
9630 {
9631 static const char *const s_pseudo_names[] = {
9632 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9633 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9634 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9635 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9636 };
9637
9638 return s_pseudo_names[i - tdep->s_pseudo_base];
9639 }
9640
9641 if (is_q_pseudo (gdbarch, i))
9642 {
9643 static const char *const q_pseudo_names[] = {
9644 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9645 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9646 };
9647
9648 return q_pseudo_names[i - tdep->q_pseudo_base];
9649 }
9650
9651 if (is_mve_pseudo (gdbarch, i))
9652 return "p0";
9653
9654 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9655 if (is_pacbti_pseudo (gdbarch, i))
9656 return "";
9657
9658 if (i >= ARRAY_SIZE (arm_register_names))
9659 /* These registers are only supported on targets which supply
9660 an XML description. */
9661 return "";
9662
9663 /* Non-pseudo registers. */
9664 return arm_register_names[i];
9665 }
9666
9667 /* Test whether the coff symbol specific value corresponds to a Thumb
9668 function. */
9669
9670 static int
9671 coff_sym_is_thumb (int val)
9672 {
9673 return (val == C_THUMBEXT
9674 || val == C_THUMBSTAT
9675 || val == C_THUMBEXTFUNC
9676 || val == C_THUMBSTATFUNC
9677 || val == C_THUMBLABEL);
9678 }
9679
9680 /* arm_coff_make_msymbol_special()
9681 arm_elf_make_msymbol_special()
9682
9683 These functions test whether the COFF or ELF symbol corresponds to
9684 an address in thumb code, and set a "special" bit in a minimal
9685 symbol to indicate that it does. */
9686
9687 static void
9688 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9689 {
9690 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9691
9692 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9693 == ST_BRANCH_TO_THUMB)
9694 MSYMBOL_SET_SPECIAL (msym);
9695 }
9696
9697 static void
9698 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9699 {
9700 if (coff_sym_is_thumb (val))
9701 MSYMBOL_SET_SPECIAL (msym);
9702 }
9703
9704 static void
9705 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9706 asymbol *sym)
9707 {
9708 const char *name = bfd_asymbol_name (sym);
9709 struct arm_per_bfd *data;
9710 struct arm_mapping_symbol new_map_sym;
9711
9712 gdb_assert (name[0] == '$');
9713 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9714 return;
9715
9716 data = arm_bfd_data_key.get (objfile->obfd.get ());
9717 if (data == NULL)
9718 data = arm_bfd_data_key.emplace (objfile->obfd.get (),
9719 objfile->obfd->section_count);
9720 arm_mapping_symbol_vec &map
9721 = data->section_maps[bfd_asymbol_section (sym)->index];
9722
9723 new_map_sym.value = sym->value;
9724 new_map_sym.type = name[1];
9725
9726 /* Insert at the end, the vector will be sorted on first use. */
9727 map.push_back (new_map_sym);
9728 }
9729
9730 static void
9731 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9732 {
9733 struct gdbarch *gdbarch = regcache->arch ();
9734 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9735
9736 /* If necessary, set the T bit. */
9737 if (arm_apcs_32)
9738 {
9739 ULONGEST val, t_bit;
9740 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9741 t_bit = arm_psr_thumb_bit (gdbarch);
9742 if (arm_pc_is_thumb (gdbarch, pc))
9743 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9744 val | t_bit);
9745 else
9746 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9747 val & ~t_bit);
9748 }
9749 }
9750
9751 /* Read the contents of a NEON quad register, by reading from two
9752 double registers. This is used to implement the quad pseudo
9753 registers, and for argument passing in case the quad registers are
9754 missing; vectors are passed in quad registers when using the VFP
9755 ABI, even if a NEON unit is not present. REGNUM is the index of
9756 the quad register, in [0, 15]. */
9757
9758 static enum register_status
9759 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9760 int regnum, gdb_byte *buf)
9761 {
9762 char name_buf[4];
9763 gdb_byte reg_buf[8];
9764 int offset, double_regnum;
9765 enum register_status status;
9766
9767 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9768 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9769 strlen (name_buf));
9770
9771 /* d0 is always the least significant half of q0. */
9772 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9773 offset = 8;
9774 else
9775 offset = 0;
9776
9777 status = regcache->raw_read (double_regnum, reg_buf);
9778 if (status != REG_VALID)
9779 return status;
9780 memcpy (buf + offset, reg_buf, 8);
9781
9782 offset = 8 - offset;
9783 status = regcache->raw_read (double_regnum + 1, reg_buf);
9784 if (status != REG_VALID)
9785 return status;
9786 memcpy (buf + offset, reg_buf, 8);
9787
9788 return REG_VALID;
9789 }
9790
9791 /* Read the contents of the MVE pseudo register REGNUM and store it
9792 in BUF. */
9793
9794 static enum register_status
9795 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9796 int regnum, gdb_byte *buf)
9797 {
9798 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9799
9800 /* P0 is the first 16 bits of VPR. */
9801 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9802 }
9803
9804 static enum register_status
9805 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9806 int regnum, gdb_byte *buf)
9807 {
9808 const int num_regs = gdbarch_num_regs (gdbarch);
9809 char name_buf[4];
9810 gdb_byte reg_buf[8];
9811 int offset, double_regnum;
9812 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9813
9814 gdb_assert (regnum >= num_regs);
9815
9816 if (is_q_pseudo (gdbarch, regnum))
9817 {
9818 /* Quad-precision register. */
9819 return arm_neon_quad_read (gdbarch, regcache,
9820 regnum - tdep->q_pseudo_base, buf);
9821 }
9822 else if (is_mve_pseudo (gdbarch, regnum))
9823 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9824 else
9825 {
9826 enum register_status status;
9827
9828 regnum -= tdep->s_pseudo_base;
9829 /* Single-precision register. */
9830 gdb_assert (regnum < 32);
9831
9832 /* s0 is always the least significant half of d0. */
9833 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9834 offset = (regnum & 1) ? 0 : 4;
9835 else
9836 offset = (regnum & 1) ? 4 : 0;
9837
9838 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9839 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9840 strlen (name_buf));
9841
9842 status = regcache->raw_read (double_regnum, reg_buf);
9843 if (status == REG_VALID)
9844 memcpy (buf, reg_buf + offset, 4);
9845 return status;
9846 }
9847 }
9848
9849 /* Store the contents of BUF to a NEON quad register, by writing to
9850 two double registers. This is used to implement the quad pseudo
9851 registers, and for argument passing in case the quad registers are
9852 missing; vectors are passed in quad registers when using the VFP
9853 ABI, even if a NEON unit is not present. REGNUM is the index
9854 of the quad register, in [0, 15]. */
9855
9856 static void
9857 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9858 int regnum, const gdb_byte *buf)
9859 {
9860 char name_buf[4];
9861 int offset, double_regnum;
9862
9863 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9864 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9865 strlen (name_buf));
9866
9867 /* d0 is always the least significant half of q0. */
9868 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9869 offset = 8;
9870 else
9871 offset = 0;
9872
9873 regcache->raw_write (double_regnum, buf + offset);
9874 offset = 8 - offset;
9875 regcache->raw_write (double_regnum + 1, buf + offset);
9876 }
9877
9878 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9879
9880 static void
9881 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9882 int regnum, const gdb_byte *buf)
9883 {
9884 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9885
9886 /* P0 is the first 16 bits of VPR. */
9887 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9888 }
9889
9890 static void
9891 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9892 int regnum, const gdb_byte *buf)
9893 {
9894 const int num_regs = gdbarch_num_regs (gdbarch);
9895 char name_buf[4];
9896 gdb_byte reg_buf[8];
9897 int offset, double_regnum;
9898 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9899
9900 gdb_assert (regnum >= num_regs);
9901
9902 if (is_q_pseudo (gdbarch, regnum))
9903 {
9904 /* Quad-precision register. */
9905 arm_neon_quad_write (gdbarch, regcache,
9906 regnum - tdep->q_pseudo_base, buf);
9907 }
9908 else if (is_mve_pseudo (gdbarch, regnum))
9909 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9910 else
9911 {
9912 regnum -= tdep->s_pseudo_base;
9913 /* Single-precision register. */
9914 gdb_assert (regnum < 32);
9915
9916 /* s0 is always the least significant half of d0. */
9917 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9918 offset = (regnum & 1) ? 0 : 4;
9919 else
9920 offset = (regnum & 1) ? 4 : 0;
9921
9922 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9923 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9924 strlen (name_buf));
9925
9926 regcache->raw_read (double_regnum, reg_buf);
9927 memcpy (reg_buf + offset, buf, 4);
9928 regcache->raw_write (double_regnum, reg_buf);
9929 }
9930 }
9931
9932 static struct value *
9933 value_of_arm_user_reg (frame_info_ptr frame, const void *baton)
9934 {
9935 const int *reg_p = (const int *) baton;
9936 return value_of_register (*reg_p, frame);
9937 }
9938 \f
9939 static enum gdb_osabi
9940 arm_elf_osabi_sniffer (bfd *abfd)
9941 {
9942 unsigned int elfosabi;
9943 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9944
9945 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9946
9947 if (elfosabi == ELFOSABI_ARM)
9948 /* GNU tools use this value. Check note sections in this case,
9949 as well. */
9950 {
9951 for (asection *sect : gdb_bfd_sections (abfd))
9952 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9953 }
9954
9955 /* Anything else will be handled by the generic ELF sniffer. */
9956 return osabi;
9957 }
9958
9959 static int
9960 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9961 const struct reggroup *group)
9962 {
9963 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9964 this, FPS register belongs to save_regroup, restore_reggroup, and
9965 all_reggroup, of course. */
9966 if (regnum == ARM_FPS_REGNUM)
9967 return (group == float_reggroup
9968 || group == save_reggroup
9969 || group == restore_reggroup
9970 || group == all_reggroup);
9971 else
9972 return default_register_reggroup_p (gdbarch, regnum, group);
9973 }
9974
9975 /* For backward-compatibility we allow two 'g' packet lengths with
9976 the remote protocol depending on whether FPA registers are
9977 supplied. M-profile targets do not have FPA registers, but some
9978 stubs already exist in the wild which use a 'g' packet which
9979 supplies them albeit with dummy values. The packet format which
9980 includes FPA registers should be considered deprecated for
9981 M-profile targets. */
9982
9983 static void
9984 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9985 {
9986 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9987
9988 if (tdep->is_m)
9989 {
9990 const target_desc *tdesc;
9991
9992 /* If we know from the executable this is an M-profile target,
9993 cater for remote targets whose register set layout is the
9994 same as the FPA layout. */
9995 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9996 register_remote_g_packet_guess (gdbarch,
9997 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9998 tdesc);
9999
10000 /* The regular M-profile layout. */
10001 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
10002 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
10003 tdesc);
10004
10005 /* M-profile plus M4F VFP. */
10006 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
10007 register_remote_g_packet_guess (gdbarch,
10008 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
10009 tdesc);
10010 /* M-profile plus MVE. */
10011 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
10012 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
10013 + ARM_VFP2_REGS_SIZE
10014 + ARM_INT_REGISTER_SIZE, tdesc);
10015
10016 /* M-profile system (stack pointers). */
10017 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
10018 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
10019 }
10020
10021 /* Otherwise we don't have a useful guess. */
10022 }
10023
10024 /* Implement the code_of_frame_writable gdbarch method. */
10025
10026 static int
10027 arm_code_of_frame_writable (struct gdbarch *gdbarch, frame_info_ptr frame)
10028 {
10029 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10030
10031 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
10032 {
10033 /* M-profile exception frames return to some magic PCs, where
10034 isn't writable at all. */
10035 return 0;
10036 }
10037 else
10038 return 1;
10039 }
10040
10041 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
10042 to be postfixed by a version (eg armv7hl). */
10043
10044 static const char *
10045 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
10046 {
10047 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
10048 return "arm(v[^- ]*)?";
10049 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
10050 }
10051
10052 /* Implement the "get_pc_address_flags" gdbarch method. */
10053
10054 static std::string
10055 arm_get_pc_address_flags (frame_info_ptr frame, CORE_ADDR pc)
10056 {
10057 if (get_frame_pc_masked (frame))
10058 return "PAC";
10059
10060 return "";
10061 }
10062
10063 /* Initialize the current architecture based on INFO. If possible,
10064 re-use an architecture from ARCHES, which is a list of
10065 architectures already created during this debugging session.
10066
10067 Called e.g. at program startup, when reading a core file, and when
10068 reading a binary file. */
10069
10070 static struct gdbarch *
10071 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
10072 {
10073 struct gdbarch_list *best_arch;
10074 enum arm_abi_kind arm_abi = arm_abi_global;
10075 enum arm_float_model fp_model = arm_fp_model;
10076 tdesc_arch_data_up tdesc_data;
10077 int i;
10078 bool is_m = false;
10079 bool have_sec_ext = false;
10080 int vfp_register_count = 0;
10081 bool have_s_pseudos = false, have_q_pseudos = false;
10082 bool have_wmmx_registers = false;
10083 bool have_neon = false;
10084 bool have_fpa_registers = true;
10085 const struct target_desc *tdesc = info.target_desc;
10086 bool have_vfp = false;
10087 bool have_mve = false;
10088 bool have_pacbti = false;
10089 int mve_vpr_regnum = -1;
10090 int register_count = ARM_NUM_REGS;
10091 bool have_m_profile_msp = false;
10092 int m_profile_msp_regnum = -1;
10093 int m_profile_psp_regnum = -1;
10094 int m_profile_msp_ns_regnum = -1;
10095 int m_profile_psp_ns_regnum = -1;
10096 int m_profile_msp_s_regnum = -1;
10097 int m_profile_psp_s_regnum = -1;
10098 int tls_regnum = 0;
10099
10100 /* If we have an object to base this architecture on, try to determine
10101 its ABI. */
10102
10103 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
10104 {
10105 int ei_osabi, e_flags;
10106
10107 switch (bfd_get_flavour (info.abfd))
10108 {
10109 case bfd_target_coff_flavour:
10110 /* Assume it's an old APCS-style ABI. */
10111 /* XXX WinCE? */
10112 arm_abi = ARM_ABI_APCS;
10113 break;
10114
10115 case bfd_target_elf_flavour:
10116 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
10117 e_flags = elf_elfheader (info.abfd)->e_flags;
10118
10119 if (ei_osabi == ELFOSABI_ARM)
10120 {
10121 /* GNU tools used to use this value, but do not for EABI
10122 objects. There's nowhere to tag an EABI version
10123 anyway, so assume APCS. */
10124 arm_abi = ARM_ABI_APCS;
10125 }
10126 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10127 {
10128 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10129
10130 switch (eabi_ver)
10131 {
10132 case EF_ARM_EABI_UNKNOWN:
10133 /* Assume GNU tools. */
10134 arm_abi = ARM_ABI_APCS;
10135 break;
10136
10137 case EF_ARM_EABI_VER4:
10138 case EF_ARM_EABI_VER5:
10139 arm_abi = ARM_ABI_AAPCS;
10140 /* EABI binaries default to VFP float ordering.
10141 They may also contain build attributes that can
10142 be used to identify if the VFP argument-passing
10143 ABI is in use. */
10144 if (fp_model == ARM_FLOAT_AUTO)
10145 {
10146 #ifdef HAVE_ELF
10147 switch (bfd_elf_get_obj_attr_int (info.abfd,
10148 OBJ_ATTR_PROC,
10149 Tag_ABI_VFP_args))
10150 {
10151 case AEABI_VFP_args_base:
10152 /* "The user intended FP parameter/result
10153 passing to conform to AAPCS, base
10154 variant". */
10155 fp_model = ARM_FLOAT_SOFT_VFP;
10156 break;
10157 case AEABI_VFP_args_vfp:
10158 /* "The user intended FP parameter/result
10159 passing to conform to AAPCS, VFP
10160 variant". */
10161 fp_model = ARM_FLOAT_VFP;
10162 break;
10163 case AEABI_VFP_args_toolchain:
10164 /* "The user intended FP parameter/result
10165 passing to conform to tool chain-specific
10166 conventions" - we don't know any such
10167 conventions, so leave it as "auto". */
10168 break;
10169 case AEABI_VFP_args_compatible:
10170 /* "Code is compatible with both the base
10171 and VFP variants; the user did not permit
10172 non-variadic functions to pass FP
10173 parameters/results" - leave it as
10174 "auto". */
10175 break;
10176 default:
10177 /* Attribute value not mentioned in the
10178 November 2012 ABI, so leave it as
10179 "auto". */
10180 break;
10181 }
10182 #else
10183 fp_model = ARM_FLOAT_SOFT_VFP;
10184 #endif
10185 }
10186 break;
10187
10188 default:
10189 /* Leave it as "auto". */
10190 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10191 break;
10192 }
10193
10194 #ifdef HAVE_ELF
10195 /* Detect M-profile programs. This only works if the
10196 executable file includes build attributes; GCC does
10197 copy them to the executable, but e.g. RealView does
10198 not. */
10199 int attr_arch
10200 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10201 Tag_CPU_arch);
10202 int attr_profile
10203 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10204 Tag_CPU_arch_profile);
10205
10206 /* GCC specifies the profile for v6-M; RealView only
10207 specifies the profile for architectures starting with
10208 V7 (as opposed to architectures with a tag
10209 numerically greater than TAG_CPU_ARCH_V7). */
10210 if (!tdesc_has_registers (tdesc)
10211 && (attr_arch == TAG_CPU_ARCH_V6_M
10212 || attr_arch == TAG_CPU_ARCH_V6S_M
10213 || attr_arch == TAG_CPU_ARCH_V7E_M
10214 || attr_arch == TAG_CPU_ARCH_V8M_BASE
10215 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
10216 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
10217 || attr_profile == 'M'))
10218 is_m = true;
10219
10220 /* Look for attributes that indicate support for ARMv8.1-m
10221 PACBTI. */
10222 if (!tdesc_has_registers (tdesc) && is_m)
10223 {
10224 int attr_pac_extension
10225 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10226 Tag_PAC_extension);
10227
10228 int attr_bti_extension
10229 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10230 Tag_BTI_extension);
10231
10232 int attr_pacret_use
10233 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10234 Tag_PACRET_use);
10235
10236 int attr_bti_use
10237 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10238 Tag_BTI_use);
10239
10240 if (attr_pac_extension != 0 || attr_bti_extension != 0
10241 || attr_pacret_use != 0 || attr_bti_use != 0)
10242 have_pacbti = true;
10243 }
10244 #endif
10245 }
10246
10247 if (fp_model == ARM_FLOAT_AUTO)
10248 {
10249 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10250 {
10251 case 0:
10252 /* Leave it as "auto". Strictly speaking this case
10253 means FPA, but almost nobody uses that now, and
10254 many toolchains fail to set the appropriate bits
10255 for the floating-point model they use. */
10256 break;
10257 case EF_ARM_SOFT_FLOAT:
10258 fp_model = ARM_FLOAT_SOFT_FPA;
10259 break;
10260 case EF_ARM_VFP_FLOAT:
10261 fp_model = ARM_FLOAT_VFP;
10262 break;
10263 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10264 fp_model = ARM_FLOAT_SOFT_VFP;
10265 break;
10266 }
10267 }
10268
10269 if (e_flags & EF_ARM_BE8)
10270 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10271
10272 break;
10273
10274 default:
10275 /* Leave it as "auto". */
10276 break;
10277 }
10278 }
10279
10280 /* Check any target description for validity. */
10281 if (tdesc_has_registers (tdesc))
10282 {
10283 /* For most registers we require GDB's default names; but also allow
10284 the numeric names for sp / lr / pc, as a convenience. */
10285 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10286 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10287 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10288
10289 const struct tdesc_feature *feature;
10290 int valid_p;
10291
10292 feature = tdesc_find_feature (tdesc,
10293 "org.gnu.gdb.arm.core");
10294 if (feature == NULL)
10295 {
10296 feature = tdesc_find_feature (tdesc,
10297 "org.gnu.gdb.arm.m-profile");
10298 if (feature == NULL)
10299 return NULL;
10300 else
10301 is_m = true;
10302 }
10303
10304 tdesc_data = tdesc_data_alloc ();
10305
10306 valid_p = 1;
10307 for (i = 0; i < ARM_SP_REGNUM; i++)
10308 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10309 arm_register_names[i]);
10310 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10311 ARM_SP_REGNUM,
10312 arm_sp_names);
10313 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10314 ARM_LR_REGNUM,
10315 arm_lr_names);
10316 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10317 ARM_PC_REGNUM,
10318 arm_pc_names);
10319 if (is_m)
10320 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10321 ARM_PS_REGNUM, "xpsr");
10322 else
10323 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10324 ARM_PS_REGNUM, "cpsr");
10325
10326 if (!valid_p)
10327 return NULL;
10328
10329 if (is_m)
10330 {
10331 feature = tdesc_find_feature (tdesc,
10332 "org.gnu.gdb.arm.m-system");
10333 if (feature != nullptr)
10334 {
10335 /* MSP */
10336 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10337 register_count, "msp");
10338 if (!valid_p)
10339 {
10340 warning (_("M-profile m-system feature is missing required register msp."));
10341 return nullptr;
10342 }
10343 have_m_profile_msp = true;
10344 m_profile_msp_regnum = register_count++;
10345
10346 /* PSP */
10347 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10348 register_count, "psp");
10349 if (!valid_p)
10350 {
10351 warning (_("M-profile m-system feature is missing required register psp."));
10352 return nullptr;
10353 }
10354 m_profile_psp_regnum = register_count++;
10355 }
10356 }
10357
10358 feature = tdesc_find_feature (tdesc,
10359 "org.gnu.gdb.arm.fpa");
10360 if (feature != NULL)
10361 {
10362 valid_p = 1;
10363 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10364 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10365 arm_register_names[i]);
10366 if (!valid_p)
10367 return NULL;
10368 }
10369 else
10370 have_fpa_registers = false;
10371
10372 feature = tdesc_find_feature (tdesc,
10373 "org.gnu.gdb.xscale.iwmmxt");
10374 if (feature != NULL)
10375 {
10376 static const char *const iwmmxt_names[] = {
10377 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10378 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10379 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10380 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10381 };
10382
10383 valid_p = 1;
10384 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10385 valid_p
10386 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10387 iwmmxt_names[i - ARM_WR0_REGNUM]);
10388
10389 /* Check for the control registers, but do not fail if they
10390 are missing. */
10391 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10392 tdesc_numbered_register (feature, tdesc_data.get (), i,
10393 iwmmxt_names[i - ARM_WR0_REGNUM]);
10394
10395 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10396 valid_p
10397 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10398 iwmmxt_names[i - ARM_WR0_REGNUM]);
10399
10400 if (!valid_p)
10401 return NULL;
10402
10403 have_wmmx_registers = true;
10404 }
10405
10406 /* If we have a VFP unit, check whether the single precision registers
10407 are present. If not, then we will synthesize them as pseudo
10408 registers. */
10409 feature = tdesc_find_feature (tdesc,
10410 "org.gnu.gdb.arm.vfp");
10411 if (feature != NULL)
10412 {
10413 static const char *const vfp_double_names[] = {
10414 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10415 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10416 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10417 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10418 };
10419
10420 /* Require the double precision registers. There must be either
10421 16 or 32. */
10422 valid_p = 1;
10423 for (i = 0; i < 32; i++)
10424 {
10425 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10426 ARM_D0_REGNUM + i,
10427 vfp_double_names[i]);
10428 if (!valid_p)
10429 break;
10430 }
10431 if (!valid_p && i == 16)
10432 valid_p = 1;
10433
10434 /* Also require FPSCR. */
10435 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10436 ARM_FPSCR_REGNUM, "fpscr");
10437 if (!valid_p)
10438 return NULL;
10439
10440 have_vfp = true;
10441
10442 if (tdesc_unnumbered_register (feature, "s0") == 0)
10443 have_s_pseudos = true;
10444
10445 vfp_register_count = i;
10446
10447 /* If we have VFP, also check for NEON. The architecture allows
10448 NEON without VFP (integer vector operations only), but GDB
10449 does not support that. */
10450 feature = tdesc_find_feature (tdesc,
10451 "org.gnu.gdb.arm.neon");
10452 if (feature != NULL)
10453 {
10454 /* NEON requires 32 double-precision registers. */
10455 if (i != 32)
10456 return NULL;
10457
10458 /* If there are quad registers defined by the stub, use
10459 their type; otherwise (normally) provide them with
10460 the default type. */
10461 if (tdesc_unnumbered_register (feature, "q0") == 0)
10462 have_q_pseudos = true;
10463 }
10464 }
10465
10466 /* Check for the TLS register feature. */
10467 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10468 if (feature != nullptr)
10469 {
10470 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10471 register_count, "tpidruro");
10472 if (!valid_p)
10473 return nullptr;
10474
10475 tls_regnum = register_count;
10476 register_count++;
10477 }
10478
10479 /* Check for MVE after all the checks for GPR's, VFP and Neon.
10480 MVE (Helium) is an M-profile extension. */
10481 if (is_m)
10482 {
10483 /* Do we have the MVE feature? */
10484 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
10485
10486 if (feature != nullptr)
10487 {
10488 /* If we have MVE, we must always have the VPR register. */
10489 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10490 register_count, "vpr");
10491 if (!valid_p)
10492 {
10493 warning (_("MVE feature is missing required register vpr."));
10494 return nullptr;
10495 }
10496
10497 have_mve = true;
10498 mve_vpr_regnum = register_count;
10499 register_count++;
10500
10501 /* We can't have Q pseudo registers available here, as that
10502 would mean we have NEON features, and that is only available
10503 on A and R profiles. */
10504 gdb_assert (!have_q_pseudos);
10505
10506 /* Given we have a M-profile target description, if MVE is
10507 enabled and there are VFP registers, we should have Q
10508 pseudo registers (Q0 ~ Q7). */
10509 if (have_vfp)
10510 have_q_pseudos = true;
10511 }
10512
10513 /* Do we have the ARMv8.1-m PACBTI feature? */
10514 feature = tdesc_find_feature (tdesc,
10515 "org.gnu.gdb.arm.m-profile-pacbti");
10516 if (feature != nullptr)
10517 {
10518 /* By advertising this feature, the target acknowledges the
10519 presence of the ARMv8.1-m PACBTI extensions.
10520
10521 We don't care for any particular registers in this group, so
10522 the target is free to include whatever it deems appropriate.
10523
10524 The expectation is for this feature to include the PAC
10525 keys. */
10526 have_pacbti = true;
10527 }
10528
10529 /* Do we have the Security extension? */
10530 feature = tdesc_find_feature (tdesc,
10531 "org.gnu.gdb.arm.secext");
10532 if (feature != nullptr)
10533 {
10534 /* Secure/Non-secure stack pointers. */
10535 /* MSP_NS */
10536 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10537 register_count, "msp_ns");
10538 if (!valid_p)
10539 {
10540 warning (_("M-profile secext feature is missing required register msp_ns."));
10541 return nullptr;
10542 }
10543 m_profile_msp_ns_regnum = register_count++;
10544
10545 /* PSP_NS */
10546 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10547 register_count, "psp_ns");
10548 if (!valid_p)
10549 {
10550 warning (_("M-profile secext feature is missing required register psp_ns."));
10551 return nullptr;
10552 }
10553 m_profile_psp_ns_regnum = register_count++;
10554
10555 /* MSP_S */
10556 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10557 register_count, "msp_s");
10558 if (!valid_p)
10559 {
10560 warning (_("M-profile secext feature is missing required register msp_s."));
10561 return nullptr;
10562 }
10563 m_profile_msp_s_regnum = register_count++;
10564
10565 /* PSP_S */
10566 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10567 register_count, "psp_s");
10568 if (!valid_p)
10569 {
10570 warning (_("M-profile secext feature is missing required register psp_s."));
10571 return nullptr;
10572 }
10573 m_profile_psp_s_regnum = register_count++;
10574
10575 have_sec_ext = true;
10576 }
10577
10578 }
10579 }
10580
10581 /* If there is already a candidate, use it. */
10582 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10583 best_arch != NULL;
10584 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10585 {
10586 arm_gdbarch_tdep *tdep
10587 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch);
10588
10589 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10590 continue;
10591
10592 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10593 continue;
10594
10595 /* There are various other properties in tdep that we do not
10596 need to check here: those derived from a target description,
10597 since gdbarches with a different target description are
10598 automatically disqualified. */
10599
10600 /* Do check is_m, though, since it might come from the binary. */
10601 if (is_m != tdep->is_m)
10602 continue;
10603
10604 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10605 the binary. */
10606 if (have_pacbti != tdep->have_pacbti)
10607 continue;
10608
10609 /* Found a match. */
10610 break;
10611 }
10612
10613 if (best_arch != NULL)
10614 return best_arch->gdbarch;
10615
10616 gdbarch *gdbarch
10617 = gdbarch_alloc (&info, gdbarch_tdep_up (new arm_gdbarch_tdep));
10618 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10619
10620 /* Record additional information about the architecture we are defining.
10621 These are gdbarch discriminators, like the OSABI. */
10622 tdep->arm_abi = arm_abi;
10623 tdep->fp_model = fp_model;
10624 tdep->is_m = is_m;
10625 tdep->have_sec_ext = have_sec_ext;
10626 tdep->have_fpa_registers = have_fpa_registers;
10627 tdep->have_wmmx_registers = have_wmmx_registers;
10628 gdb_assert (vfp_register_count == 0
10629 || vfp_register_count == 16
10630 || vfp_register_count == 32);
10631 tdep->vfp_register_count = vfp_register_count;
10632 tdep->have_s_pseudos = have_s_pseudos;
10633 tdep->have_q_pseudos = have_q_pseudos;
10634 tdep->have_neon = have_neon;
10635 tdep->tls_regnum = tls_regnum;
10636
10637 /* Adjust the MVE feature settings. */
10638 if (have_mve)
10639 {
10640 tdep->have_mve = true;
10641 tdep->mve_vpr_regnum = mve_vpr_regnum;
10642 }
10643
10644 /* Adjust the PACBTI feature settings. */
10645 tdep->have_pacbti = have_pacbti;
10646
10647 /* Adjust the M-profile stack pointers settings. */
10648 if (have_m_profile_msp)
10649 {
10650 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10651 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10652 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10653 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10654 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10655 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10656 }
10657
10658 arm_register_g_packet_guesses (gdbarch);
10659
10660 /* Breakpoints. */
10661 switch (info.byte_order_for_code)
10662 {
10663 case BFD_ENDIAN_BIG:
10664 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10665 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10666 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10667 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10668
10669 break;
10670
10671 case BFD_ENDIAN_LITTLE:
10672 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10673 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10674 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10675 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10676
10677 break;
10678
10679 default:
10680 internal_error (_("arm_gdbarch_init: bad byte order for float format"));
10681 }
10682
10683 /* On ARM targets char defaults to unsigned. */
10684 set_gdbarch_char_signed (gdbarch, 0);
10685
10686 /* wchar_t is unsigned under the AAPCS. */
10687 if (tdep->arm_abi == ARM_ABI_AAPCS)
10688 set_gdbarch_wchar_signed (gdbarch, 0);
10689 else
10690 set_gdbarch_wchar_signed (gdbarch, 1);
10691
10692 /* Compute type alignment. */
10693 set_gdbarch_type_align (gdbarch, arm_type_align);
10694
10695 /* Note: for displaced stepping, this includes the breakpoint, and one word
10696 of additional scratch space. This setting isn't used for anything beside
10697 displaced stepping at present. */
10698 set_gdbarch_displaced_step_buffer_length
10699 (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10700 set_gdbarch_max_insn_length (gdbarch, 4);
10701
10702 /* This should be low enough for everything. */
10703 tdep->lowest_pc = 0x20;
10704 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10705
10706 /* The default, for both APCS and AAPCS, is to return small
10707 structures in registers. */
10708 tdep->struct_return = reg_struct_return;
10709
10710 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10711 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10712
10713 if (is_m)
10714 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10715
10716 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10717
10718 frame_base_set_default (gdbarch, &arm_normal_base);
10719
10720 /* Address manipulation. */
10721 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10722
10723 /* Advance PC across function entry code. */
10724 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10725
10726 /* Detect whether PC is at a point where the stack has been destroyed. */
10727 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10728
10729 /* Skip trampolines. */
10730 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10731
10732 /* The stack grows downward. */
10733 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10734
10735 /* Breakpoint manipulation. */
10736 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10737 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10738 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10739 arm_breakpoint_kind_from_current_state);
10740
10741 /* Information about registers, etc. */
10742 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10743 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10744 set_gdbarch_num_regs (gdbarch, register_count);
10745 set_gdbarch_register_type (gdbarch, arm_register_type);
10746 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10747
10748 /* This "info float" is FPA-specific. Use the generic version if we
10749 do not have FPA. */
10750 if (tdep->have_fpa_registers)
10751 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10752
10753 /* Internal <-> external register number maps. */
10754 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10755 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10756
10757 set_gdbarch_register_name (gdbarch, arm_register_name);
10758
10759 /* Returning results. */
10760 set_gdbarch_return_value_as_value (gdbarch, arm_return_value);
10761
10762 /* Disassembly. */
10763 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10764
10765 /* Minsymbol frobbing. */
10766 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10767 set_gdbarch_coff_make_msymbol_special (gdbarch,
10768 arm_coff_make_msymbol_special);
10769 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10770
10771 /* Thumb-2 IT block support. */
10772 set_gdbarch_adjust_breakpoint_address (gdbarch,
10773 arm_adjust_breakpoint_address);
10774
10775 /* Virtual tables. */
10776 set_gdbarch_vbit_in_delta (gdbarch, 1);
10777
10778 /* Hook in the ABI-specific overrides, if they have been registered. */
10779 gdbarch_init_osabi (info, gdbarch);
10780
10781 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10782
10783 /* Add some default predicates. */
10784 if (is_m)
10785 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10786 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10787 dwarf2_append_unwinders (gdbarch);
10788 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10789 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10790 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10791
10792 /* Now we have tuned the configuration, set a few final things,
10793 based on what the OS ABI has told us. */
10794
10795 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10796 binaries are always marked. */
10797 if (tdep->arm_abi == ARM_ABI_AUTO)
10798 tdep->arm_abi = ARM_ABI_APCS;
10799
10800 /* Watchpoints are not steppable. */
10801 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10802
10803 /* We used to default to FPA for generic ARM, but almost nobody
10804 uses that now, and we now provide a way for the user to force
10805 the model. So default to the most useful variant. */
10806 if (tdep->fp_model == ARM_FLOAT_AUTO)
10807 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10808
10809 if (tdep->jb_pc >= 0)
10810 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10811
10812 /* Floating point sizes and format. */
10813 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10814 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10815 {
10816 set_gdbarch_double_format
10817 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10818 set_gdbarch_long_double_format
10819 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10820 }
10821 else
10822 {
10823 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10824 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10825 }
10826
10827 /* Hook used to decorate frames with signed return addresses, only available
10828 for ARMv8.1-m PACBTI. */
10829 if (is_m && have_pacbti)
10830 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10831
10832 if (tdesc_data != nullptr)
10833 {
10834 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10835
10836 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10837 register_count = gdbarch_num_regs (gdbarch);
10838
10839 /* Override tdesc_register_type to adjust the types of VFP
10840 registers for NEON. */
10841 set_gdbarch_register_type (gdbarch, arm_register_type);
10842 }
10843
10844 /* Initialize the pseudo register data. */
10845 int num_pseudos = 0;
10846 if (tdep->have_s_pseudos)
10847 {
10848 /* VFP single precision pseudo registers (S0~S31). */
10849 tdep->s_pseudo_base = register_count;
10850 tdep->s_pseudo_count = 32;
10851 num_pseudos += tdep->s_pseudo_count;
10852
10853 if (tdep->have_q_pseudos)
10854 {
10855 /* NEON quad precision pseudo registers (Q0~Q15). */
10856 tdep->q_pseudo_base = register_count + num_pseudos;
10857
10858 if (have_neon)
10859 tdep->q_pseudo_count = 16;
10860 else if (have_mve)
10861 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10862
10863 num_pseudos += tdep->q_pseudo_count;
10864 }
10865 }
10866
10867 /* Do we have any MVE pseudo registers? */
10868 if (have_mve)
10869 {
10870 tdep->mve_pseudo_base = register_count + num_pseudos;
10871 tdep->mve_pseudo_count = 1;
10872 num_pseudos += tdep->mve_pseudo_count;
10873 }
10874
10875 /* Do we have any ARMv8.1-m PACBTI pseudo registers. */
10876 if (have_pacbti)
10877 {
10878 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10879 tdep->pacbti_pseudo_count = 1;
10880 num_pseudos += tdep->pacbti_pseudo_count;
10881 }
10882
10883 /* Set some pseudo register hooks, if we have pseudo registers. */
10884 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10885 {
10886 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10887 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10888 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10889 }
10890
10891 /* Add standard register aliases. We add aliases even for those
10892 names which are used by the current architecture - it's simpler,
10893 and does no harm, since nothing ever lists user registers. */
10894 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10895 user_reg_add (gdbarch, arm_register_aliases[i].name,
10896 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10897
10898 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10899 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10900
10901 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10902
10903 return gdbarch;
10904 }
10905
10906 static void
10907 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10908 {
10909 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10910
10911 if (tdep == NULL)
10912 return;
10913
10914 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10915 (int) tdep->fp_model);
10916 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10917 (int) tdep->have_fpa_registers);
10918 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10919 (int) tdep->have_wmmx_registers);
10920 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10921 (int) tdep->vfp_register_count);
10922 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10923 tdep->have_s_pseudos? "true" : "false");
10924 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10925 (int) tdep->s_pseudo_base);
10926 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10927 (int) tdep->s_pseudo_count);
10928 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10929 tdep->have_q_pseudos? "true" : "false");
10930 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10931 (int) tdep->q_pseudo_base);
10932 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10933 (int) tdep->q_pseudo_count);
10934 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10935 (int) tdep->have_neon);
10936 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10937 tdep->have_mve? "yes" : "no");
10938 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10939 tdep->mve_vpr_regnum);
10940 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10941 tdep->mve_pseudo_base);
10942 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10943 tdep->mve_pseudo_count);
10944 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10945 tdep->m_profile_msp_regnum);
10946 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10947 tdep->m_profile_psp_regnum);
10948 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10949 tdep->m_profile_msp_ns_regnum);
10950 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10951 tdep->m_profile_psp_ns_regnum);
10952 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10953 tdep->m_profile_msp_s_regnum);
10954 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10955 tdep->m_profile_psp_s_regnum);
10956 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10957 (unsigned long) tdep->lowest_pc);
10958 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10959 tdep->have_pacbti? "yes" : "no");
10960 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10961 tdep->pacbti_pseudo_base);
10962 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10963 tdep->pacbti_pseudo_count);
10964 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10965 tdep->is_m? "yes" : "no");
10966 }
10967
10968 #if GDB_SELF_TEST
10969 namespace selftests
10970 {
10971 static void arm_record_test (void);
10972 static void arm_analyze_prologue_test ();
10973 }
10974 #endif
10975
10976 void _initialize_arm_tdep ();
10977 void
10978 _initialize_arm_tdep ()
10979 {
10980 long length;
10981 int i, j;
10982 char regdesc[1024], *rdptr = regdesc;
10983 size_t rest = sizeof (regdesc);
10984
10985 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10986
10987 /* Add ourselves to objfile event chain. */
10988 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10989
10990 /* Register an ELF OS ABI sniffer for ARM binaries. */
10991 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10992 bfd_target_elf_flavour,
10993 arm_elf_osabi_sniffer);
10994
10995 /* Add root prefix command for all "set arm"/"show arm" commands. */
10996 add_setshow_prefix_cmd ("arm", no_class,
10997 _("Various ARM-specific commands."),
10998 _("Various ARM-specific commands."),
10999 &setarmcmdlist, &showarmcmdlist,
11000 &setlist, &showlist);
11001
11002 arm_disassembler_options = xstrdup ("reg-names-std");
11003 const disasm_options_t *disasm_options
11004 = &disassembler_options_arm ()->options;
11005 int num_disassembly_styles = 0;
11006 for (i = 0; disasm_options->name[i] != NULL; i++)
11007 if (startswith (disasm_options->name[i], "reg-names-"))
11008 num_disassembly_styles++;
11009
11010 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
11011 valid_disassembly_styles = XNEWVEC (const char *,
11012 num_disassembly_styles + 1);
11013 for (i = j = 0; disasm_options->name[i] != NULL; i++)
11014 if (startswith (disasm_options->name[i], "reg-names-"))
11015 {
11016 size_t offset = strlen ("reg-names-");
11017 const char *style = disasm_options->name[i];
11018 valid_disassembly_styles[j++] = &style[offset];
11019 if (strcmp (&style[offset], "std") == 0)
11020 disassembly_style = &style[offset];
11021 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
11022 disasm_options->description[i]);
11023 rdptr += length;
11024 rest -= length;
11025 }
11026 /* Mark the end of valid options. */
11027 valid_disassembly_styles[num_disassembly_styles] = NULL;
11028
11029 /* Create the help text. */
11030 std::string helptext = string_printf ("%s%s%s",
11031 _("The valid values are:\n"),
11032 regdesc,
11033 _("The default is \"std\"."));
11034
11035 add_setshow_enum_cmd("disassembler", no_class,
11036 valid_disassembly_styles, &disassembly_style,
11037 _("Set the disassembly style."),
11038 _("Show the disassembly style."),
11039 helptext.c_str (),
11040 set_disassembly_style_sfunc,
11041 show_disassembly_style_sfunc,
11042 &setarmcmdlist, &showarmcmdlist);
11043
11044 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
11045 _("Set usage of ARM 32-bit mode."),
11046 _("Show usage of ARM 32-bit mode."),
11047 _("When off, a 26-bit PC will be used."),
11048 NULL,
11049 NULL, /* FIXME: i18n: Usage of ARM 32-bit
11050 mode is %s. */
11051 &setarmcmdlist, &showarmcmdlist);
11052
11053 /* Add a command to allow the user to force the FPU model. */
11054 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
11055 _("Set the floating point type."),
11056 _("Show the floating point type."),
11057 _("auto - Determine the FP typefrom the OS-ABI.\n\
11058 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
11059 fpa - FPA co-processor (GCC compiled).\n\
11060 softvfp - Software FP with pure-endian doubles.\n\
11061 vfp - VFP co-processor."),
11062 set_fp_model_sfunc, show_fp_model,
11063 &setarmcmdlist, &showarmcmdlist);
11064
11065 /* Add a command to allow the user to force the ABI. */
11066 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
11067 _("Set the ABI."),
11068 _("Show the ABI."),
11069 NULL, arm_set_abi, arm_show_abi,
11070 &setarmcmdlist, &showarmcmdlist);
11071
11072 /* Add two commands to allow the user to force the assumed
11073 execution mode. */
11074 add_setshow_enum_cmd ("fallback-mode", class_support,
11075 arm_mode_strings, &arm_fallback_mode_string,
11076 _("Set the mode assumed when symbols are unavailable."),
11077 _("Show the mode assumed when symbols are unavailable."),
11078 NULL, NULL, arm_show_fallback_mode,
11079 &setarmcmdlist, &showarmcmdlist);
11080 add_setshow_enum_cmd ("force-mode", class_support,
11081 arm_mode_strings, &arm_force_mode_string,
11082 _("Set the mode assumed even when symbols are available."),
11083 _("Show the mode assumed even when symbols are available."),
11084 NULL, NULL, arm_show_force_mode,
11085 &setarmcmdlist, &showarmcmdlist);
11086
11087 /* Add a command to stop triggering security exceptions when
11088 unwinding exception stacks. */
11089 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
11090 _("Set usage of non-secure to secure exception stack unwinding."),
11091 _("Show usage of non-secure to secure exception stack unwinding."),
11092 _("When on, the debugger can trigger memory access traps."),
11093 NULL, arm_show_unwind_secure_frames,
11094 &setarmcmdlist, &showarmcmdlist);
11095
11096 /* Debugging flag. */
11097 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
11098 _("Set ARM debugging."),
11099 _("Show ARM debugging."),
11100 _("When on, arm-specific debugging is enabled."),
11101 NULL,
11102 NULL, /* FIXME: i18n: "ARM debugging is %s. */
11103 &setdebuglist, &showdebuglist);
11104
11105 #if GDB_SELF_TEST
11106 selftests::register_test ("arm-record", selftests::arm_record_test);
11107 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
11108 #endif
11109
11110 }
11111
11112 /* ARM-reversible process record data structures. */
11113
11114 #define ARM_INSN_SIZE_BYTES 4
11115 #define THUMB_INSN_SIZE_BYTES 2
11116 #define THUMB2_INSN_SIZE_BYTES 4
11117
11118
11119 /* Position of the bit within a 32-bit ARM instruction
11120 that defines whether the instruction is a load or store. */
11121 #define INSN_S_L_BIT_NUM 20
11122
11123 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
11124 do \
11125 { \
11126 unsigned int reg_len = LENGTH; \
11127 if (reg_len) \
11128 { \
11129 REGS = XNEWVEC (uint32_t, reg_len); \
11130 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
11131 } \
11132 } \
11133 while (0)
11134
11135 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
11136 do \
11137 { \
11138 unsigned int mem_len = LENGTH; \
11139 if (mem_len) \
11140 { \
11141 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
11142 memcpy(&MEMS->len, &RECORD_BUF[0], \
11143 sizeof(struct arm_mem_r) * LENGTH); \
11144 } \
11145 } \
11146 while (0)
11147
11148 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
11149 #define INSN_RECORDED(ARM_RECORD) \
11150 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
11151
11152 /* ARM memory record structure. */
11153 struct arm_mem_r
11154 {
11155 uint32_t len; /* Record length. */
11156 uint32_t addr; /* Memory address. */
11157 };
11158
11159 /* ARM instruction record contains opcode of current insn
11160 and execution state (before entry to decode_insn()),
11161 contains list of to-be-modified registers and
11162 memory blocks (on return from decode_insn()). */
11163
11164 struct arm_insn_decode_record
11165 {
11166 struct gdbarch *gdbarch;
11167 struct regcache *regcache;
11168 CORE_ADDR this_addr; /* Address of the insn being decoded. */
11169 uint32_t arm_insn; /* Should accommodate thumb. */
11170 uint32_t cond; /* Condition code. */
11171 uint32_t opcode; /* Insn opcode. */
11172 uint32_t decode; /* Insn decode bits. */
11173 uint32_t mem_rec_count; /* No of mem records. */
11174 uint32_t reg_rec_count; /* No of reg records. */
11175 uint32_t *arm_regs; /* Registers to be saved for this record. */
11176 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
11177 };
11178
11179
11180 /* Checks ARM SBZ and SBO mandatory fields. */
11181
11182 static int
11183 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
11184 {
11185 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
11186
11187 if (!len)
11188 return 1;
11189
11190 if (!sbo)
11191 ones = ~ones;
11192
11193 while (ones)
11194 {
11195 if (!(ones & sbo))
11196 {
11197 return 0;
11198 }
11199 ones = ones >> 1;
11200 }
11201 return 1;
11202 }
11203
11204 enum arm_record_result
11205 {
11206 ARM_RECORD_SUCCESS = 0,
11207 ARM_RECORD_FAILURE = 1
11208 };
11209
11210 enum arm_record_strx_t
11211 {
11212 ARM_RECORD_STRH=1,
11213 ARM_RECORD_STRD
11214 };
11215
11216 enum record_type_t
11217 {
11218 ARM_RECORD=1,
11219 THUMB_RECORD,
11220 THUMB2_RECORD
11221 };
11222
11223
11224 static int
11225 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
11226 uint32_t *record_buf_mem, arm_record_strx_t str_type)
11227 {
11228
11229 struct regcache *reg_cache = arm_insn_r->regcache;
11230 ULONGEST u_regval[2]= {0};
11231
11232 uint32_t reg_src1 = 0, reg_src2 = 0;
11233 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
11234
11235 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11236 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11237
11238 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11239 {
11240 /* 1) Handle misc store, immediate offset. */
11241 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11242 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11243 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11244 regcache_raw_read_unsigned (reg_cache, reg_src1,
11245 &u_regval[0]);
11246 if (ARM_PC_REGNUM == reg_src1)
11247 {
11248 /* If R15 was used as Rn, hence current PC+8. */
11249 u_regval[0] = u_regval[0] + 8;
11250 }
11251 offset_8 = (immed_high << 4) | immed_low;
11252 /* Calculate target store address. */
11253 if (14 == arm_insn_r->opcode)
11254 {
11255 tgt_mem_addr = u_regval[0] + offset_8;
11256 }
11257 else
11258 {
11259 tgt_mem_addr = u_regval[0] - offset_8;
11260 }
11261 if (ARM_RECORD_STRH == str_type)
11262 {
11263 record_buf_mem[0] = 2;
11264 record_buf_mem[1] = tgt_mem_addr;
11265 arm_insn_r->mem_rec_count = 1;
11266 }
11267 else if (ARM_RECORD_STRD == str_type)
11268 {
11269 record_buf_mem[0] = 4;
11270 record_buf_mem[1] = tgt_mem_addr;
11271 record_buf_mem[2] = 4;
11272 record_buf_mem[3] = tgt_mem_addr + 4;
11273 arm_insn_r->mem_rec_count = 2;
11274 }
11275 }
11276 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
11277 {
11278 /* 2) Store, register offset. */
11279 /* Get Rm. */
11280 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11281 /* Get Rn. */
11282 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11283 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11284 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11285 if (15 == reg_src2)
11286 {
11287 /* If R15 was used as Rn, hence current PC+8. */
11288 u_regval[0] = u_regval[0] + 8;
11289 }
11290 /* Calculate target store address, Rn +/- Rm, register offset. */
11291 if (12 == arm_insn_r->opcode)
11292 {
11293 tgt_mem_addr = u_regval[0] + u_regval[1];
11294 }
11295 else
11296 {
11297 tgt_mem_addr = u_regval[1] - u_regval[0];
11298 }
11299 if (ARM_RECORD_STRH == str_type)
11300 {
11301 record_buf_mem[0] = 2;
11302 record_buf_mem[1] = tgt_mem_addr;
11303 arm_insn_r->mem_rec_count = 1;
11304 }
11305 else if (ARM_RECORD_STRD == str_type)
11306 {
11307 record_buf_mem[0] = 4;
11308 record_buf_mem[1] = tgt_mem_addr;
11309 record_buf_mem[2] = 4;
11310 record_buf_mem[3] = tgt_mem_addr + 4;
11311 arm_insn_r->mem_rec_count = 2;
11312 }
11313 }
11314 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11315 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11316 {
11317 /* 3) Store, immediate pre-indexed. */
11318 /* 5) Store, immediate post-indexed. */
11319 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11320 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11321 offset_8 = (immed_high << 4) | immed_low;
11322 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11323 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11324 /* Calculate target store address, Rn +/- Rm, register offset. */
11325 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11326 {
11327 tgt_mem_addr = u_regval[0] + offset_8;
11328 }
11329 else
11330 {
11331 tgt_mem_addr = u_regval[0] - offset_8;
11332 }
11333 if (ARM_RECORD_STRH == str_type)
11334 {
11335 record_buf_mem[0] = 2;
11336 record_buf_mem[1] = tgt_mem_addr;
11337 arm_insn_r->mem_rec_count = 1;
11338 }
11339 else if (ARM_RECORD_STRD == str_type)
11340 {
11341 record_buf_mem[0] = 4;
11342 record_buf_mem[1] = tgt_mem_addr;
11343 record_buf_mem[2] = 4;
11344 record_buf_mem[3] = tgt_mem_addr + 4;
11345 arm_insn_r->mem_rec_count = 2;
11346 }
11347 /* Record Rn also as it changes. */
11348 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11349 arm_insn_r->reg_rec_count = 1;
11350 }
11351 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11352 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11353 {
11354 /* 4) Store, register pre-indexed. */
11355 /* 6) Store, register post -indexed. */
11356 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11357 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11358 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11359 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11360 /* Calculate target store address, Rn +/- Rm, register offset. */
11361 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11362 {
11363 tgt_mem_addr = u_regval[0] + u_regval[1];
11364 }
11365 else
11366 {
11367 tgt_mem_addr = u_regval[1] - u_regval[0];
11368 }
11369 if (ARM_RECORD_STRH == str_type)
11370 {
11371 record_buf_mem[0] = 2;
11372 record_buf_mem[1] = tgt_mem_addr;
11373 arm_insn_r->mem_rec_count = 1;
11374 }
11375 else if (ARM_RECORD_STRD == str_type)
11376 {
11377 record_buf_mem[0] = 4;
11378 record_buf_mem[1] = tgt_mem_addr;
11379 record_buf_mem[2] = 4;
11380 record_buf_mem[3] = tgt_mem_addr + 4;
11381 arm_insn_r->mem_rec_count = 2;
11382 }
11383 /* Record Rn also as it changes. */
11384 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11385 arm_insn_r->reg_rec_count = 1;
11386 }
11387 return 0;
11388 }
11389
11390 /* Handling ARM extension space insns. */
11391
11392 static int
11393 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11394 {
11395 int ret = 0; /* Return value: -1:record failure ; 0:success */
11396 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11397 uint32_t record_buf[8], record_buf_mem[8];
11398 uint32_t reg_src1 = 0;
11399 struct regcache *reg_cache = arm_insn_r->regcache;
11400 ULONGEST u_regval = 0;
11401
11402 gdb_assert (!INSN_RECORDED(arm_insn_r));
11403 /* Handle unconditional insn extension space. */
11404
11405 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11406 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11407 if (arm_insn_r->cond)
11408 {
11409 /* PLD has no affect on architectural state, it just affects
11410 the caches. */
11411 if (5 == ((opcode1 & 0xE0) >> 5))
11412 {
11413 /* BLX(1) */
11414 record_buf[0] = ARM_PS_REGNUM;
11415 record_buf[1] = ARM_LR_REGNUM;
11416 arm_insn_r->reg_rec_count = 2;
11417 }
11418 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11419 }
11420
11421
11422 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11423 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11424 {
11425 ret = -1;
11426 /* Undefined instruction on ARM V5; need to handle if later
11427 versions define it. */
11428 }
11429
11430 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11431 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11432 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11433
11434 /* Handle arithmetic insn extension space. */
11435 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11436 && !INSN_RECORDED(arm_insn_r))
11437 {
11438 /* Handle MLA(S) and MUL(S). */
11439 if (in_inclusive_range (insn_op1, 0U, 3U))
11440 {
11441 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11442 record_buf[1] = ARM_PS_REGNUM;
11443 arm_insn_r->reg_rec_count = 2;
11444 }
11445 else if (in_inclusive_range (insn_op1, 4U, 15U))
11446 {
11447 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11448 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11449 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11450 record_buf[2] = ARM_PS_REGNUM;
11451 arm_insn_r->reg_rec_count = 3;
11452 }
11453 }
11454
11455 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11456 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11457 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11458
11459 /* Handle control insn extension space. */
11460
11461 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11462 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11463 {
11464 if (!bit (arm_insn_r->arm_insn,25))
11465 {
11466 if (!bits (arm_insn_r->arm_insn, 4, 7))
11467 {
11468 if ((0 == insn_op1) || (2 == insn_op1))
11469 {
11470 /* MRS. */
11471 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11472 arm_insn_r->reg_rec_count = 1;
11473 }
11474 else if (1 == insn_op1)
11475 {
11476 /* CSPR is going to be changed. */
11477 record_buf[0] = ARM_PS_REGNUM;
11478 arm_insn_r->reg_rec_count = 1;
11479 }
11480 else if (3 == insn_op1)
11481 {
11482 /* SPSR is going to be changed. */
11483 /* We need to get SPSR value, which is yet to be done. */
11484 return -1;
11485 }
11486 }
11487 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11488 {
11489 if (1 == insn_op1)
11490 {
11491 /* BX. */
11492 record_buf[0] = ARM_PS_REGNUM;
11493 arm_insn_r->reg_rec_count = 1;
11494 }
11495 else if (3 == insn_op1)
11496 {
11497 /* CLZ. */
11498 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11499 arm_insn_r->reg_rec_count = 1;
11500 }
11501 }
11502 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11503 {
11504 /* BLX. */
11505 record_buf[0] = ARM_PS_REGNUM;
11506 record_buf[1] = ARM_LR_REGNUM;
11507 arm_insn_r->reg_rec_count = 2;
11508 }
11509 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11510 {
11511 /* QADD, QSUB, QDADD, QDSUB */
11512 record_buf[0] = ARM_PS_REGNUM;
11513 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11514 arm_insn_r->reg_rec_count = 2;
11515 }
11516 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11517 {
11518 /* BKPT. */
11519 record_buf[0] = ARM_PS_REGNUM;
11520 record_buf[1] = ARM_LR_REGNUM;
11521 arm_insn_r->reg_rec_count = 2;
11522
11523 /* Save SPSR also;how? */
11524 return -1;
11525 }
11526 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11527 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11528 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11529 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11530 )
11531 {
11532 if (0 == insn_op1 || 1 == insn_op1)
11533 {
11534 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11535 /* We dont do optimization for SMULW<y> where we
11536 need only Rd. */
11537 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11538 record_buf[1] = ARM_PS_REGNUM;
11539 arm_insn_r->reg_rec_count = 2;
11540 }
11541 else if (2 == insn_op1)
11542 {
11543 /* SMLAL<x><y>. */
11544 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11545 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11546 arm_insn_r->reg_rec_count = 2;
11547 }
11548 else if (3 == insn_op1)
11549 {
11550 /* SMUL<x><y>. */
11551 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11552 arm_insn_r->reg_rec_count = 1;
11553 }
11554 }
11555 }
11556 else
11557 {
11558 /* MSR : immediate form. */
11559 if (1 == insn_op1)
11560 {
11561 /* CSPR is going to be changed. */
11562 record_buf[0] = ARM_PS_REGNUM;
11563 arm_insn_r->reg_rec_count = 1;
11564 }
11565 else if (3 == insn_op1)
11566 {
11567 /* SPSR is going to be changed. */
11568 /* we need to get SPSR value, which is yet to be done */
11569 return -1;
11570 }
11571 }
11572 }
11573
11574 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11575 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11576 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11577
11578 /* Handle load/store insn extension space. */
11579
11580 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11581 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11582 && !INSN_RECORDED(arm_insn_r))
11583 {
11584 /* SWP/SWPB. */
11585 if (0 == insn_op1)
11586 {
11587 /* These insn, changes register and memory as well. */
11588 /* SWP or SWPB insn. */
11589 /* Get memory address given by Rn. */
11590 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11591 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11592 /* SWP insn ?, swaps word. */
11593 if (8 == arm_insn_r->opcode)
11594 {
11595 record_buf_mem[0] = 4;
11596 }
11597 else
11598 {
11599 /* SWPB insn, swaps only byte. */
11600 record_buf_mem[0] = 1;
11601 }
11602 record_buf_mem[1] = u_regval;
11603 arm_insn_r->mem_rec_count = 1;
11604 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11605 arm_insn_r->reg_rec_count = 1;
11606 }
11607 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11608 {
11609 /* STRH. */
11610 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11611 ARM_RECORD_STRH);
11612 }
11613 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11614 {
11615 /* LDRD. */
11616 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11617 record_buf[1] = record_buf[0] + 1;
11618 arm_insn_r->reg_rec_count = 2;
11619 }
11620 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11621 {
11622 /* STRD. */
11623 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11624 ARM_RECORD_STRD);
11625 }
11626 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11627 {
11628 /* LDRH, LDRSB, LDRSH. */
11629 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11630 arm_insn_r->reg_rec_count = 1;
11631 }
11632
11633 }
11634
11635 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11636 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11637 && !INSN_RECORDED(arm_insn_r))
11638 {
11639 ret = -1;
11640 /* Handle coprocessor insn extension space. */
11641 }
11642
11643 /* To be done for ARMv5 and later; as of now we return -1. */
11644 if (-1 == ret)
11645 return ret;
11646
11647 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11648 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11649
11650 return ret;
11651 }
11652
11653 /* Handling opcode 000 insns. */
11654
11655 static int
11656 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11657 {
11658 struct regcache *reg_cache = arm_insn_r->regcache;
11659 uint32_t record_buf[8], record_buf_mem[8];
11660 ULONGEST u_regval[2] = {0};
11661
11662 uint32_t reg_src1 = 0;
11663 uint32_t opcode1 = 0;
11664
11665 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11666 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11667 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11668
11669 if (!((opcode1 & 0x19) == 0x10))
11670 {
11671 /* Data-processing (register) and Data-processing (register-shifted
11672 register */
11673 /* Out of 11 shifter operands mode, all the insn modifies destination
11674 register, which is specified by 13-16 decode. */
11675 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11676 record_buf[1] = ARM_PS_REGNUM;
11677 arm_insn_r->reg_rec_count = 2;
11678 }
11679 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11680 {
11681 /* Miscellaneous instructions */
11682
11683 if (3 == arm_insn_r->decode && 0x12 == opcode1
11684 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11685 {
11686 /* Handle BLX, branch and link/exchange. */
11687 if (9 == arm_insn_r->opcode)
11688 {
11689 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11690 and R14 stores the return address. */
11691 record_buf[0] = ARM_PS_REGNUM;
11692 record_buf[1] = ARM_LR_REGNUM;
11693 arm_insn_r->reg_rec_count = 2;
11694 }
11695 }
11696 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11697 {
11698 /* Handle enhanced software breakpoint insn, BKPT. */
11699 /* CPSR is changed to be executed in ARM state, disabling normal
11700 interrupts, entering abort mode. */
11701 /* According to high vector configuration PC is set. */
11702 /* user hit breakpoint and type reverse, in
11703 that case, we need to go back with previous CPSR and
11704 Program Counter. */
11705 record_buf[0] = ARM_PS_REGNUM;
11706 record_buf[1] = ARM_LR_REGNUM;
11707 arm_insn_r->reg_rec_count = 2;
11708
11709 /* Save SPSR also; how? */
11710 return -1;
11711 }
11712 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11713 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11714 {
11715 /* Handle BX, branch and link/exchange. */
11716 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11717 record_buf[0] = ARM_PS_REGNUM;
11718 arm_insn_r->reg_rec_count = 1;
11719 }
11720 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11721 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11722 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11723 {
11724 /* Count leading zeros: CLZ. */
11725 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11726 arm_insn_r->reg_rec_count = 1;
11727 }
11728 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11729 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11730 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11731 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11732 {
11733 /* Handle MRS insn. */
11734 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11735 arm_insn_r->reg_rec_count = 1;
11736 }
11737 }
11738 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11739 {
11740 /* Multiply and multiply-accumulate */
11741
11742 /* Handle multiply instructions. */
11743 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11744 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11745 {
11746 /* Handle MLA and MUL. */
11747 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11748 record_buf[1] = ARM_PS_REGNUM;
11749 arm_insn_r->reg_rec_count = 2;
11750 }
11751 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11752 {
11753 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11754 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11755 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11756 record_buf[2] = ARM_PS_REGNUM;
11757 arm_insn_r->reg_rec_count = 3;
11758 }
11759 }
11760 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11761 {
11762 /* Synchronization primitives */
11763
11764 /* Handling SWP, SWPB. */
11765 /* These insn, changes register and memory as well. */
11766 /* SWP or SWPB insn. */
11767
11768 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11769 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11770 /* SWP insn ?, swaps word. */
11771 if (8 == arm_insn_r->opcode)
11772 {
11773 record_buf_mem[0] = 4;
11774 }
11775 else
11776 {
11777 /* SWPB insn, swaps only byte. */
11778 record_buf_mem[0] = 1;
11779 }
11780 record_buf_mem[1] = u_regval[0];
11781 arm_insn_r->mem_rec_count = 1;
11782 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11783 arm_insn_r->reg_rec_count = 1;
11784 }
11785 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11786 || 15 == arm_insn_r->decode)
11787 {
11788 if ((opcode1 & 0x12) == 2)
11789 {
11790 /* Extra load/store (unprivileged) */
11791 return -1;
11792 }
11793 else
11794 {
11795 /* Extra load/store */
11796 switch (bits (arm_insn_r->arm_insn, 5, 6))
11797 {
11798 case 1:
11799 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11800 {
11801 /* STRH (register), STRH (immediate) */
11802 arm_record_strx (arm_insn_r, &record_buf[0],
11803 &record_buf_mem[0], ARM_RECORD_STRH);
11804 }
11805 else if ((opcode1 & 0x05) == 0x1)
11806 {
11807 /* LDRH (register) */
11808 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11809 arm_insn_r->reg_rec_count = 1;
11810
11811 if (bit (arm_insn_r->arm_insn, 21))
11812 {
11813 /* Write back to Rn. */
11814 record_buf[arm_insn_r->reg_rec_count++]
11815 = bits (arm_insn_r->arm_insn, 16, 19);
11816 }
11817 }
11818 else if ((opcode1 & 0x05) == 0x5)
11819 {
11820 /* LDRH (immediate), LDRH (literal) */
11821 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11822
11823 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11824 arm_insn_r->reg_rec_count = 1;
11825
11826 if (rn != 15)
11827 {
11828 /*LDRH (immediate) */
11829 if (bit (arm_insn_r->arm_insn, 21))
11830 {
11831 /* Write back to Rn. */
11832 record_buf[arm_insn_r->reg_rec_count++] = rn;
11833 }
11834 }
11835 }
11836 else
11837 return -1;
11838 break;
11839 case 2:
11840 if ((opcode1 & 0x05) == 0x0)
11841 {
11842 /* LDRD (register) */
11843 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11844 record_buf[1] = record_buf[0] + 1;
11845 arm_insn_r->reg_rec_count = 2;
11846
11847 if (bit (arm_insn_r->arm_insn, 21))
11848 {
11849 /* Write back to Rn. */
11850 record_buf[arm_insn_r->reg_rec_count++]
11851 = bits (arm_insn_r->arm_insn, 16, 19);
11852 }
11853 }
11854 else if ((opcode1 & 0x05) == 0x1)
11855 {
11856 /* LDRSB (register) */
11857 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11858 arm_insn_r->reg_rec_count = 1;
11859
11860 if (bit (arm_insn_r->arm_insn, 21))
11861 {
11862 /* Write back to Rn. */
11863 record_buf[arm_insn_r->reg_rec_count++]
11864 = bits (arm_insn_r->arm_insn, 16, 19);
11865 }
11866 }
11867 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11868 {
11869 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11870 LDRSB (literal) */
11871 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11872
11873 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11874 arm_insn_r->reg_rec_count = 1;
11875
11876 if (rn != 15)
11877 {
11878 /*LDRD (immediate), LDRSB (immediate) */
11879 if (bit (arm_insn_r->arm_insn, 21))
11880 {
11881 /* Write back to Rn. */
11882 record_buf[arm_insn_r->reg_rec_count++] = rn;
11883 }
11884 }
11885 }
11886 else
11887 return -1;
11888 break;
11889 case 3:
11890 if ((opcode1 & 0x05) == 0x0)
11891 {
11892 /* STRD (register) */
11893 arm_record_strx (arm_insn_r, &record_buf[0],
11894 &record_buf_mem[0], ARM_RECORD_STRD);
11895 }
11896 else if ((opcode1 & 0x05) == 0x1)
11897 {
11898 /* LDRSH (register) */
11899 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11900 arm_insn_r->reg_rec_count = 1;
11901
11902 if (bit (arm_insn_r->arm_insn, 21))
11903 {
11904 /* Write back to Rn. */
11905 record_buf[arm_insn_r->reg_rec_count++]
11906 = bits (arm_insn_r->arm_insn, 16, 19);
11907 }
11908 }
11909 else if ((opcode1 & 0x05) == 0x4)
11910 {
11911 /* STRD (immediate) */
11912 arm_record_strx (arm_insn_r, &record_buf[0],
11913 &record_buf_mem[0], ARM_RECORD_STRD);
11914 }
11915 else if ((opcode1 & 0x05) == 0x5)
11916 {
11917 /* LDRSH (immediate), LDRSH (literal) */
11918 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11919 arm_insn_r->reg_rec_count = 1;
11920
11921 if (bit (arm_insn_r->arm_insn, 21))
11922 {
11923 /* Write back to Rn. */
11924 record_buf[arm_insn_r->reg_rec_count++]
11925 = bits (arm_insn_r->arm_insn, 16, 19);
11926 }
11927 }
11928 else
11929 return -1;
11930 break;
11931 default:
11932 return -1;
11933 }
11934 }
11935 }
11936 else
11937 {
11938 return -1;
11939 }
11940
11941 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11942 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11943 return 0;
11944 }
11945
11946 /* Handling opcode 001 insns. */
11947
11948 static int
11949 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11950 {
11951 uint32_t record_buf[8], record_buf_mem[8];
11952
11953 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11954 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11955
11956 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11957 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11958 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11959 )
11960 {
11961 /* Handle MSR insn. */
11962 if (9 == arm_insn_r->opcode)
11963 {
11964 /* CSPR is going to be changed. */
11965 record_buf[0] = ARM_PS_REGNUM;
11966 arm_insn_r->reg_rec_count = 1;
11967 }
11968 else
11969 {
11970 /* SPSR is going to be changed. */
11971 }
11972 }
11973 else if (arm_insn_r->opcode <= 15)
11974 {
11975 /* Normal data processing insns. */
11976 /* Out of 11 shifter operands mode, all the insn modifies destination
11977 register, which is specified by 13-16 decode. */
11978 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11979 record_buf[1] = ARM_PS_REGNUM;
11980 arm_insn_r->reg_rec_count = 2;
11981 }
11982 else
11983 {
11984 return -1;
11985 }
11986
11987 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11988 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11989 return 0;
11990 }
11991
11992 static int
11993 arm_record_media (arm_insn_decode_record *arm_insn_r)
11994 {
11995 uint32_t record_buf[8];
11996
11997 switch (bits (arm_insn_r->arm_insn, 22, 24))
11998 {
11999 case 0:
12000 /* Parallel addition and subtraction, signed */
12001 case 1:
12002 /* Parallel addition and subtraction, unsigned */
12003 case 2:
12004 case 3:
12005 /* Packing, unpacking, saturation and reversal */
12006 {
12007 int rd = bits (arm_insn_r->arm_insn, 12, 15);
12008
12009 record_buf[arm_insn_r->reg_rec_count++] = rd;
12010 }
12011 break;
12012
12013 case 4:
12014 case 5:
12015 /* Signed multiplies */
12016 {
12017 int rd = bits (arm_insn_r->arm_insn, 16, 19);
12018 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
12019
12020 record_buf[arm_insn_r->reg_rec_count++] = rd;
12021 if (op1 == 0x0)
12022 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12023 else if (op1 == 0x4)
12024 record_buf[arm_insn_r->reg_rec_count++]
12025 = bits (arm_insn_r->arm_insn, 12, 15);
12026 }
12027 break;
12028
12029 case 6:
12030 {
12031 if (bit (arm_insn_r->arm_insn, 21)
12032 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
12033 {
12034 /* SBFX */
12035 record_buf[arm_insn_r->reg_rec_count++]
12036 = bits (arm_insn_r->arm_insn, 12, 15);
12037 }
12038 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
12039 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
12040 {
12041 /* USAD8 and USADA8 */
12042 record_buf[arm_insn_r->reg_rec_count++]
12043 = bits (arm_insn_r->arm_insn, 16, 19);
12044 }
12045 }
12046 break;
12047
12048 case 7:
12049 {
12050 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
12051 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
12052 {
12053 /* Permanently UNDEFINED */
12054 return -1;
12055 }
12056 else
12057 {
12058 /* BFC, BFI and UBFX */
12059 record_buf[arm_insn_r->reg_rec_count++]
12060 = bits (arm_insn_r->arm_insn, 12, 15);
12061 }
12062 }
12063 break;
12064
12065 default:
12066 return -1;
12067 }
12068
12069 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12070
12071 return 0;
12072 }
12073
12074 /* Handle ARM mode instructions with opcode 010. */
12075
12076 static int
12077 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
12078 {
12079 struct regcache *reg_cache = arm_insn_r->regcache;
12080
12081 uint32_t reg_base , reg_dest;
12082 uint32_t offset_12, tgt_mem_addr;
12083 uint32_t record_buf[8], record_buf_mem[8];
12084 unsigned char wback;
12085 ULONGEST u_regval;
12086
12087 /* Calculate wback. */
12088 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
12089 || (bit (arm_insn_r->arm_insn, 21) == 1);
12090
12091 arm_insn_r->reg_rec_count = 0;
12092 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12093
12094 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12095 {
12096 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
12097 and LDRT. */
12098
12099 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12100 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
12101
12102 /* The LDR instruction is capable of doing branching. If MOV LR, PC
12103 preceeds a LDR instruction having R15 as reg_base, it
12104 emulates a branch and link instruction, and hence we need to save
12105 CPSR and PC as well. */
12106 if (ARM_PC_REGNUM == reg_dest)
12107 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12108
12109 /* If wback is true, also save the base register, which is going to be
12110 written to. */
12111 if (wback)
12112 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12113 }
12114 else
12115 {
12116 /* STR (immediate), STRB (immediate), STRBT and STRT. */
12117
12118 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
12119 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12120
12121 /* Handle bit U. */
12122 if (bit (arm_insn_r->arm_insn, 23))
12123 {
12124 /* U == 1: Add the offset. */
12125 tgt_mem_addr = (uint32_t) u_regval + offset_12;
12126 }
12127 else
12128 {
12129 /* U == 0: subtract the offset. */
12130 tgt_mem_addr = (uint32_t) u_regval - offset_12;
12131 }
12132
12133 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
12134 bytes. */
12135 if (bit (arm_insn_r->arm_insn, 22))
12136 {
12137 /* STRB and STRBT: 1 byte. */
12138 record_buf_mem[0] = 1;
12139 }
12140 else
12141 {
12142 /* STR and STRT: 4 bytes. */
12143 record_buf_mem[0] = 4;
12144 }
12145
12146 /* Handle bit P. */
12147 if (bit (arm_insn_r->arm_insn, 24))
12148 record_buf_mem[1] = tgt_mem_addr;
12149 else
12150 record_buf_mem[1] = (uint32_t) u_regval;
12151
12152 arm_insn_r->mem_rec_count = 1;
12153
12154 /* If wback is true, also save the base register, which is going to be
12155 written to. */
12156 if (wback)
12157 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12158 }
12159
12160 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12161 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12162 return 0;
12163 }
12164
12165 /* Handling opcode 011 insns. */
12166
12167 static int
12168 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
12169 {
12170 struct regcache *reg_cache = arm_insn_r->regcache;
12171
12172 uint32_t shift_imm = 0;
12173 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
12174 uint32_t offset_12 = 0, tgt_mem_addr = 0;
12175 uint32_t record_buf[8], record_buf_mem[8];
12176
12177 LONGEST s_word;
12178 ULONGEST u_regval[2];
12179
12180 if (bit (arm_insn_r->arm_insn, 4))
12181 return arm_record_media (arm_insn_r);
12182
12183 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
12184 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
12185
12186 /* Handle enhanced store insns and LDRD DSP insn,
12187 order begins according to addressing modes for store insns
12188 STRH insn. */
12189
12190 /* LDR or STR? */
12191 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12192 {
12193 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12194 /* LDR insn has a capability to do branching, if
12195 MOV LR, PC is preceded by LDR insn having Rn as R15
12196 in that case, it emulates branch and link insn, and hence we
12197 need to save CSPR and PC as well. */
12198 if (15 != reg_dest)
12199 {
12200 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12201 arm_insn_r->reg_rec_count = 1;
12202 }
12203 else
12204 {
12205 record_buf[0] = reg_dest;
12206 record_buf[1] = ARM_PS_REGNUM;
12207 arm_insn_r->reg_rec_count = 2;
12208 }
12209 }
12210 else
12211 {
12212 if (! bits (arm_insn_r->arm_insn, 4, 11))
12213 {
12214 /* Store insn, register offset and register pre-indexed,
12215 register post-indexed. */
12216 /* Get Rm. */
12217 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12218 /* Get Rn. */
12219 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12220 regcache_raw_read_unsigned (reg_cache, reg_src1
12221 , &u_regval[0]);
12222 regcache_raw_read_unsigned (reg_cache, reg_src2
12223 , &u_regval[1]);
12224 if (15 == reg_src2)
12225 {
12226 /* If R15 was used as Rn, hence current PC+8. */
12227 /* Pre-indexed mode doesnt reach here ; illegal insn. */
12228 u_regval[0] = u_regval[0] + 8;
12229 }
12230 /* Calculate target store address, Rn +/- Rm, register offset. */
12231 /* U == 1. */
12232 if (bit (arm_insn_r->arm_insn, 23))
12233 {
12234 tgt_mem_addr = u_regval[0] + u_regval[1];
12235 }
12236 else
12237 {
12238 tgt_mem_addr = u_regval[1] - u_regval[0];
12239 }
12240
12241 switch (arm_insn_r->opcode)
12242 {
12243 /* STR. */
12244 case 8:
12245 case 12:
12246 /* STR. */
12247 case 9:
12248 case 13:
12249 /* STRT. */
12250 case 1:
12251 case 5:
12252 /* STR. */
12253 case 0:
12254 case 4:
12255 record_buf_mem[0] = 4;
12256 break;
12257
12258 /* STRB. */
12259 case 10:
12260 case 14:
12261 /* STRB. */
12262 case 11:
12263 case 15:
12264 /* STRBT. */
12265 case 3:
12266 case 7:
12267 /* STRB. */
12268 case 2:
12269 case 6:
12270 record_buf_mem[0] = 1;
12271 break;
12272
12273 default:
12274 gdb_assert_not_reached ("no decoding pattern found");
12275 break;
12276 }
12277 record_buf_mem[1] = tgt_mem_addr;
12278 arm_insn_r->mem_rec_count = 1;
12279
12280 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12281 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12282 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12283 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12284 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12285 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12286 )
12287 {
12288 /* Rn is going to be changed in pre-indexed mode and
12289 post-indexed mode as well. */
12290 record_buf[0] = reg_src2;
12291 arm_insn_r->reg_rec_count = 1;
12292 }
12293 }
12294 else
12295 {
12296 /* Store insn, scaled register offset; scaled pre-indexed. */
12297 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
12298 /* Get Rm. */
12299 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12300 /* Get Rn. */
12301 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12302 /* Get shift_imm. */
12303 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
12304 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12305 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
12306 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12307 /* Offset_12 used as shift. */
12308 switch (offset_12)
12309 {
12310 case 0:
12311 /* Offset_12 used as index. */
12312 offset_12 = u_regval[0] << shift_imm;
12313 break;
12314
12315 case 1:
12316 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
12317 break;
12318
12319 case 2:
12320 if (!shift_imm)
12321 {
12322 if (bit (u_regval[0], 31))
12323 {
12324 offset_12 = 0xFFFFFFFF;
12325 }
12326 else
12327 {
12328 offset_12 = 0;
12329 }
12330 }
12331 else
12332 {
12333 /* This is arithmetic shift. */
12334 offset_12 = s_word >> shift_imm;
12335 }
12336 break;
12337
12338 case 3:
12339 if (!shift_imm)
12340 {
12341 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12342 &u_regval[1]);
12343 /* Get C flag value and shift it by 31. */
12344 offset_12 = (((bit (u_regval[1], 29)) << 31) \
12345 | (u_regval[0]) >> 1);
12346 }
12347 else
12348 {
12349 offset_12 = (u_regval[0] >> shift_imm) \
12350 | (u_regval[0] <<
12351 (sizeof(uint32_t) - shift_imm));
12352 }
12353 break;
12354
12355 default:
12356 gdb_assert_not_reached ("no decoding pattern found");
12357 break;
12358 }
12359
12360 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12361 /* bit U set. */
12362 if (bit (arm_insn_r->arm_insn, 23))
12363 {
12364 tgt_mem_addr = u_regval[1] + offset_12;
12365 }
12366 else
12367 {
12368 tgt_mem_addr = u_regval[1] - offset_12;
12369 }
12370
12371 switch (arm_insn_r->opcode)
12372 {
12373 /* STR. */
12374 case 8:
12375 case 12:
12376 /* STR. */
12377 case 9:
12378 case 13:
12379 /* STRT. */
12380 case 1:
12381 case 5:
12382 /* STR. */
12383 case 0:
12384 case 4:
12385 record_buf_mem[0] = 4;
12386 break;
12387
12388 /* STRB. */
12389 case 10:
12390 case 14:
12391 /* STRB. */
12392 case 11:
12393 case 15:
12394 /* STRBT. */
12395 case 3:
12396 case 7:
12397 /* STRB. */
12398 case 2:
12399 case 6:
12400 record_buf_mem[0] = 1;
12401 break;
12402
12403 default:
12404 gdb_assert_not_reached ("no decoding pattern found");
12405 break;
12406 }
12407 record_buf_mem[1] = tgt_mem_addr;
12408 arm_insn_r->mem_rec_count = 1;
12409
12410 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12411 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12412 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12413 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12414 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12415 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12416 )
12417 {
12418 /* Rn is going to be changed in register scaled pre-indexed
12419 mode,and scaled post indexed mode. */
12420 record_buf[0] = reg_src2;
12421 arm_insn_r->reg_rec_count = 1;
12422 }
12423 }
12424 }
12425
12426 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12427 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12428 return 0;
12429 }
12430
12431 /* Handle ARM mode instructions with opcode 100. */
12432
12433 static int
12434 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12435 {
12436 struct regcache *reg_cache = arm_insn_r->regcache;
12437 uint32_t register_count = 0, register_bits;
12438 uint32_t reg_base, addr_mode;
12439 uint32_t record_buf[24], record_buf_mem[48];
12440 uint32_t wback;
12441 ULONGEST u_regval;
12442
12443 /* Fetch the list of registers. */
12444 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12445 arm_insn_r->reg_rec_count = 0;
12446
12447 /* Fetch the base register that contains the address we are loading data
12448 to. */
12449 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12450
12451 /* Calculate wback. */
12452 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
12453
12454 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12455 {
12456 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12457
12458 /* Find out which registers are going to be loaded from memory. */
12459 while (register_bits)
12460 {
12461 if (register_bits & 0x00000001)
12462 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12463 register_bits = register_bits >> 1;
12464 register_count++;
12465 }
12466
12467
12468 /* If wback is true, also save the base register, which is going to be
12469 written to. */
12470 if (wback)
12471 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12472
12473 /* Save the CPSR register. */
12474 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12475 }
12476 else
12477 {
12478 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12479
12480 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12481
12482 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12483
12484 /* Find out how many registers are going to be stored to memory. */
12485 while (register_bits)
12486 {
12487 if (register_bits & 0x00000001)
12488 register_count++;
12489 register_bits = register_bits >> 1;
12490 }
12491
12492 switch (addr_mode)
12493 {
12494 /* STMDA (STMED): Decrement after. */
12495 case 0:
12496 record_buf_mem[1] = (uint32_t) u_regval
12497 - register_count * ARM_INT_REGISTER_SIZE + 4;
12498 break;
12499 /* STM (STMIA, STMEA): Increment after. */
12500 case 1:
12501 record_buf_mem[1] = (uint32_t) u_regval;
12502 break;
12503 /* STMDB (STMFD): Decrement before. */
12504 case 2:
12505 record_buf_mem[1] = (uint32_t) u_regval
12506 - register_count * ARM_INT_REGISTER_SIZE;
12507 break;
12508 /* STMIB (STMFA): Increment before. */
12509 case 3:
12510 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12511 break;
12512 default:
12513 gdb_assert_not_reached ("no decoding pattern found");
12514 break;
12515 }
12516
12517 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12518 arm_insn_r->mem_rec_count = 1;
12519
12520 /* If wback is true, also save the base register, which is going to be
12521 written to. */
12522 if (wback)
12523 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12524 }
12525
12526 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12527 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12528 return 0;
12529 }
12530
12531 /* Handling opcode 101 insns. */
12532
12533 static int
12534 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12535 {
12536 uint32_t record_buf[8];
12537
12538 /* Handle B, BL, BLX(1) insns. */
12539 /* B simply branches so we do nothing here. */
12540 /* Note: BLX(1) doesnt fall here but instead it falls into
12541 extension space. */
12542 if (bit (arm_insn_r->arm_insn, 24))
12543 {
12544 record_buf[0] = ARM_LR_REGNUM;
12545 arm_insn_r->reg_rec_count = 1;
12546 }
12547
12548 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12549
12550 return 0;
12551 }
12552
12553 static int
12554 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12555 {
12556 gdb_printf (gdb_stderr,
12557 _("Process record does not support instruction "
12558 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12559 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12560
12561 return -1;
12562 }
12563
12564 /* Record handler for vector data transfer instructions. */
12565
12566 static int
12567 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12568 {
12569 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12570 uint32_t record_buf[4];
12571
12572 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12573 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12574 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12575 bit_l = bit (arm_insn_r->arm_insn, 20);
12576 bit_c = bit (arm_insn_r->arm_insn, 8);
12577
12578 /* Handle VMOV instruction. */
12579 if (bit_l && bit_c)
12580 {
12581 record_buf[0] = reg_t;
12582 arm_insn_r->reg_rec_count = 1;
12583 }
12584 else if (bit_l && !bit_c)
12585 {
12586 /* Handle VMOV instruction. */
12587 if (bits_a == 0x00)
12588 {
12589 record_buf[0] = reg_t;
12590 arm_insn_r->reg_rec_count = 1;
12591 }
12592 /* Handle VMRS instruction. */
12593 else if (bits_a == 0x07)
12594 {
12595 if (reg_t == 15)
12596 reg_t = ARM_PS_REGNUM;
12597
12598 record_buf[0] = reg_t;
12599 arm_insn_r->reg_rec_count = 1;
12600 }
12601 }
12602 else if (!bit_l && !bit_c)
12603 {
12604 /* Handle VMOV instruction. */
12605 if (bits_a == 0x00)
12606 {
12607 record_buf[0] = ARM_D0_REGNUM + reg_v;
12608
12609 arm_insn_r->reg_rec_count = 1;
12610 }
12611 /* Handle VMSR instruction. */
12612 else if (bits_a == 0x07)
12613 {
12614 record_buf[0] = ARM_FPSCR_REGNUM;
12615 arm_insn_r->reg_rec_count = 1;
12616 }
12617 }
12618 else if (!bit_l && bit_c)
12619 {
12620 /* Handle VMOV instruction. */
12621 if (!(bits_a & 0x04))
12622 {
12623 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12624 + ARM_D0_REGNUM;
12625 arm_insn_r->reg_rec_count = 1;
12626 }
12627 /* Handle VDUP instruction. */
12628 else
12629 {
12630 if (bit (arm_insn_r->arm_insn, 21))
12631 {
12632 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12633 record_buf[0] = reg_v + ARM_D0_REGNUM;
12634 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12635 arm_insn_r->reg_rec_count = 2;
12636 }
12637 else
12638 {
12639 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12640 record_buf[0] = reg_v + ARM_D0_REGNUM;
12641 arm_insn_r->reg_rec_count = 1;
12642 }
12643 }
12644 }
12645
12646 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12647 return 0;
12648 }
12649
12650 /* Record handler for extension register load/store instructions. */
12651
12652 static int
12653 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12654 {
12655 uint32_t opcode, single_reg;
12656 uint8_t op_vldm_vstm;
12657 uint32_t record_buf[8], record_buf_mem[128];
12658 ULONGEST u_regval = 0;
12659
12660 struct regcache *reg_cache = arm_insn_r->regcache;
12661
12662 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12663 single_reg = !bit (arm_insn_r->arm_insn, 8);
12664 op_vldm_vstm = opcode & 0x1b;
12665
12666 /* Handle VMOV instructions. */
12667 if ((opcode & 0x1e) == 0x04)
12668 {
12669 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
12670 {
12671 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12672 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12673 arm_insn_r->reg_rec_count = 2;
12674 }
12675 else
12676 {
12677 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12678 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12679
12680 if (single_reg)
12681 {
12682 /* The first S register number m is REG_M:M (M is bit 5),
12683 the corresponding D register number is REG_M:M / 2, which
12684 is REG_M. */
12685 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12686 /* The second S register number is REG_M:M + 1, the
12687 corresponding D register number is (REG_M:M + 1) / 2.
12688 IOW, if bit M is 1, the first and second S registers
12689 are mapped to different D registers, otherwise, they are
12690 in the same D register. */
12691 if (bit_m)
12692 {
12693 record_buf[arm_insn_r->reg_rec_count++]
12694 = ARM_D0_REGNUM + reg_m + 1;
12695 }
12696 }
12697 else
12698 {
12699 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12700 arm_insn_r->reg_rec_count = 1;
12701 }
12702 }
12703 }
12704 /* Handle VSTM and VPUSH instructions. */
12705 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12706 || op_vldm_vstm == 0x12)
12707 {
12708 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12709 uint32_t memory_index = 0;
12710
12711 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12712 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12713 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12714 imm_off32 = imm_off8 << 2;
12715 memory_count = imm_off8;
12716
12717 if (bit (arm_insn_r->arm_insn, 23))
12718 start_address = u_regval;
12719 else
12720 start_address = u_regval - imm_off32;
12721
12722 if (bit (arm_insn_r->arm_insn, 21))
12723 {
12724 record_buf[0] = reg_rn;
12725 arm_insn_r->reg_rec_count = 1;
12726 }
12727
12728 while (memory_count > 0)
12729 {
12730 if (single_reg)
12731 {
12732 record_buf_mem[memory_index] = 4;
12733 record_buf_mem[memory_index + 1] = start_address;
12734 start_address = start_address + 4;
12735 memory_index = memory_index + 2;
12736 }
12737 else
12738 {
12739 record_buf_mem[memory_index] = 4;
12740 record_buf_mem[memory_index + 1] = start_address;
12741 record_buf_mem[memory_index + 2] = 4;
12742 record_buf_mem[memory_index + 3] = start_address + 4;
12743 start_address = start_address + 8;
12744 memory_index = memory_index + 4;
12745 }
12746 memory_count--;
12747 }
12748 arm_insn_r->mem_rec_count = (memory_index >> 1);
12749 }
12750 /* Handle VLDM instructions. */
12751 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12752 || op_vldm_vstm == 0x13)
12753 {
12754 uint32_t reg_count, reg_vd;
12755 uint32_t reg_index = 0;
12756 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12757
12758 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12759 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12760
12761 /* REG_VD is the first D register number. If the instruction
12762 loads memory to S registers (SINGLE_REG is TRUE), the register
12763 number is (REG_VD << 1 | bit D), so the corresponding D
12764 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12765 if (!single_reg)
12766 reg_vd = reg_vd | (bit_d << 4);
12767
12768 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12769 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12770
12771 /* If the instruction loads memory to D register, REG_COUNT should
12772 be divided by 2, according to the ARM Architecture Reference
12773 Manual. If the instruction loads memory to S register, divide by
12774 2 as well because two S registers are mapped to D register. */
12775 reg_count = reg_count / 2;
12776 if (single_reg && bit_d)
12777 {
12778 /* Increase the register count if S register list starts from
12779 an odd number (bit d is one). */
12780 reg_count++;
12781 }
12782
12783 while (reg_count > 0)
12784 {
12785 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12786 reg_count--;
12787 }
12788 arm_insn_r->reg_rec_count = reg_index;
12789 }
12790 /* VSTR Vector store register. */
12791 else if ((opcode & 0x13) == 0x10)
12792 {
12793 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12794 uint32_t memory_index = 0;
12795
12796 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12797 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12798 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12799 imm_off32 = imm_off8 << 2;
12800
12801 if (bit (arm_insn_r->arm_insn, 23))
12802 start_address = u_regval + imm_off32;
12803 else
12804 start_address = u_regval - imm_off32;
12805
12806 if (single_reg)
12807 {
12808 record_buf_mem[memory_index] = 4;
12809 record_buf_mem[memory_index + 1] = start_address;
12810 arm_insn_r->mem_rec_count = 1;
12811 }
12812 else
12813 {
12814 record_buf_mem[memory_index] = 4;
12815 record_buf_mem[memory_index + 1] = start_address;
12816 record_buf_mem[memory_index + 2] = 4;
12817 record_buf_mem[memory_index + 3] = start_address + 4;
12818 arm_insn_r->mem_rec_count = 2;
12819 }
12820 }
12821 /* VLDR Vector load register. */
12822 else if ((opcode & 0x13) == 0x11)
12823 {
12824 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12825
12826 if (!single_reg)
12827 {
12828 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12829 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12830 }
12831 else
12832 {
12833 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12834 /* Record register D rather than pseudo register S. */
12835 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12836 }
12837 arm_insn_r->reg_rec_count = 1;
12838 }
12839
12840 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12841 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12842 return 0;
12843 }
12844
12845 /* Record handler for arm/thumb mode VFP data processing instructions. */
12846
12847 static int
12848 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12849 {
12850 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12851 uint32_t record_buf[4];
12852 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12853 enum insn_types curr_insn_type = INSN_INV;
12854
12855 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12856 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12857 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12858 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12859 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12860 bit_d = bit (arm_insn_r->arm_insn, 22);
12861 /* Mask off the "D" bit. */
12862 opc1 = opc1 & ~0x04;
12863
12864 /* Handle VMLA, VMLS. */
12865 if (opc1 == 0x00)
12866 {
12867 if (bit (arm_insn_r->arm_insn, 10))
12868 {
12869 if (bit (arm_insn_r->arm_insn, 6))
12870 curr_insn_type = INSN_T0;
12871 else
12872 curr_insn_type = INSN_T1;
12873 }
12874 else
12875 {
12876 if (dp_op_sz)
12877 curr_insn_type = INSN_T1;
12878 else
12879 curr_insn_type = INSN_T2;
12880 }
12881 }
12882 /* Handle VNMLA, VNMLS, VNMUL. */
12883 else if (opc1 == 0x01)
12884 {
12885 if (dp_op_sz)
12886 curr_insn_type = INSN_T1;
12887 else
12888 curr_insn_type = INSN_T2;
12889 }
12890 /* Handle VMUL. */
12891 else if (opc1 == 0x02 && !(opc3 & 0x01))
12892 {
12893 if (bit (arm_insn_r->arm_insn, 10))
12894 {
12895 if (bit (arm_insn_r->arm_insn, 6))
12896 curr_insn_type = INSN_T0;
12897 else
12898 curr_insn_type = INSN_T1;
12899 }
12900 else
12901 {
12902 if (dp_op_sz)
12903 curr_insn_type = INSN_T1;
12904 else
12905 curr_insn_type = INSN_T2;
12906 }
12907 }
12908 /* Handle VADD, VSUB. */
12909 else if (opc1 == 0x03)
12910 {
12911 if (!bit (arm_insn_r->arm_insn, 9))
12912 {
12913 if (bit (arm_insn_r->arm_insn, 6))
12914 curr_insn_type = INSN_T0;
12915 else
12916 curr_insn_type = INSN_T1;
12917 }
12918 else
12919 {
12920 if (dp_op_sz)
12921 curr_insn_type = INSN_T1;
12922 else
12923 curr_insn_type = INSN_T2;
12924 }
12925 }
12926 /* Handle VDIV. */
12927 else if (opc1 == 0x08)
12928 {
12929 if (dp_op_sz)
12930 curr_insn_type = INSN_T1;
12931 else
12932 curr_insn_type = INSN_T2;
12933 }
12934 /* Handle all other vfp data processing instructions. */
12935 else if (opc1 == 0x0b)
12936 {
12937 /* Handle VMOV. */
12938 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12939 {
12940 if (bit (arm_insn_r->arm_insn, 4))
12941 {
12942 if (bit (arm_insn_r->arm_insn, 6))
12943 curr_insn_type = INSN_T0;
12944 else
12945 curr_insn_type = INSN_T1;
12946 }
12947 else
12948 {
12949 if (dp_op_sz)
12950 curr_insn_type = INSN_T1;
12951 else
12952 curr_insn_type = INSN_T2;
12953 }
12954 }
12955 /* Handle VNEG and VABS. */
12956 else if ((opc2 == 0x01 && opc3 == 0x01)
12957 || (opc2 == 0x00 && opc3 == 0x03))
12958 {
12959 if (!bit (arm_insn_r->arm_insn, 11))
12960 {
12961 if (bit (arm_insn_r->arm_insn, 6))
12962 curr_insn_type = INSN_T0;
12963 else
12964 curr_insn_type = INSN_T1;
12965 }
12966 else
12967 {
12968 if (dp_op_sz)
12969 curr_insn_type = INSN_T1;
12970 else
12971 curr_insn_type = INSN_T2;
12972 }
12973 }
12974 /* Handle VSQRT. */
12975 else if (opc2 == 0x01 && opc3 == 0x03)
12976 {
12977 if (dp_op_sz)
12978 curr_insn_type = INSN_T1;
12979 else
12980 curr_insn_type = INSN_T2;
12981 }
12982 /* Handle VCVT. */
12983 else if (opc2 == 0x07 && opc3 == 0x03)
12984 {
12985 if (!dp_op_sz)
12986 curr_insn_type = INSN_T1;
12987 else
12988 curr_insn_type = INSN_T2;
12989 }
12990 else if (opc3 & 0x01)
12991 {
12992 /* Handle VCVT. */
12993 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12994 {
12995 if (!bit (arm_insn_r->arm_insn, 18))
12996 curr_insn_type = INSN_T2;
12997 else
12998 {
12999 if (dp_op_sz)
13000 curr_insn_type = INSN_T1;
13001 else
13002 curr_insn_type = INSN_T2;
13003 }
13004 }
13005 /* Handle VCVT. */
13006 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
13007 {
13008 if (dp_op_sz)
13009 curr_insn_type = INSN_T1;
13010 else
13011 curr_insn_type = INSN_T2;
13012 }
13013 /* Handle VCVTB, VCVTT. */
13014 else if ((opc2 & 0x0e) == 0x02)
13015 curr_insn_type = INSN_T2;
13016 /* Handle VCMP, VCMPE. */
13017 else if ((opc2 & 0x0e) == 0x04)
13018 curr_insn_type = INSN_T3;
13019 }
13020 }
13021
13022 switch (curr_insn_type)
13023 {
13024 case INSN_T0:
13025 reg_vd = reg_vd | (bit_d << 4);
13026 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13027 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
13028 arm_insn_r->reg_rec_count = 2;
13029 break;
13030
13031 case INSN_T1:
13032 reg_vd = reg_vd | (bit_d << 4);
13033 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13034 arm_insn_r->reg_rec_count = 1;
13035 break;
13036
13037 case INSN_T2:
13038 reg_vd = (reg_vd << 1) | bit_d;
13039 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13040 arm_insn_r->reg_rec_count = 1;
13041 break;
13042
13043 case INSN_T3:
13044 record_buf[0] = ARM_FPSCR_REGNUM;
13045 arm_insn_r->reg_rec_count = 1;
13046 break;
13047
13048 default:
13049 gdb_assert_not_reached ("no decoding pattern found");
13050 break;
13051 }
13052
13053 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
13054 return 0;
13055 }
13056
13057 /* Handling opcode 110 insns. */
13058
13059 static int
13060 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
13061 {
13062 uint32_t op1, op1_ebit, coproc;
13063
13064 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13065 op1 = bits (arm_insn_r->arm_insn, 20, 25);
13066 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13067
13068 if ((coproc & 0x0e) == 0x0a)
13069 {
13070 /* Handle extension register ld/st instructions. */
13071 if (!(op1 & 0x20))
13072 return arm_record_exreg_ld_st_insn (arm_insn_r);
13073
13074 /* 64-bit transfers between arm core and extension registers. */
13075 if ((op1 & 0x3e) == 0x04)
13076 return arm_record_exreg_ld_st_insn (arm_insn_r);
13077 }
13078 else
13079 {
13080 /* Handle coprocessor ld/st instructions. */
13081 if (!(op1 & 0x3a))
13082 {
13083 /* Store. */
13084 if (!op1_ebit)
13085 return arm_record_unsupported_insn (arm_insn_r);
13086 else
13087 /* Load. */
13088 return arm_record_unsupported_insn (arm_insn_r);
13089 }
13090
13091 /* Move to coprocessor from two arm core registers. */
13092 if (op1 == 0x4)
13093 return arm_record_unsupported_insn (arm_insn_r);
13094
13095 /* Move to two arm core registers from coprocessor. */
13096 if (op1 == 0x5)
13097 {
13098 uint32_t reg_t[2];
13099
13100 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
13101 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
13102 arm_insn_r->reg_rec_count = 2;
13103
13104 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
13105 return 0;
13106 }
13107 }
13108 return arm_record_unsupported_insn (arm_insn_r);
13109 }
13110
13111 /* Handling opcode 111 insns. */
13112
13113 static int
13114 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
13115 {
13116 uint32_t op, op1_ebit, coproc, bits_24_25;
13117 arm_gdbarch_tdep *tdep
13118 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch);
13119 struct regcache *reg_cache = arm_insn_r->regcache;
13120
13121 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
13122 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13123 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13124 op = bit (arm_insn_r->arm_insn, 4);
13125 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
13126
13127 /* Handle arm SWI/SVC system call instructions. */
13128 if (bits_24_25 == 0x3)
13129 {
13130 if (tdep->arm_syscall_record != NULL)
13131 {
13132 ULONGEST svc_operand, svc_number;
13133
13134 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
13135
13136 if (svc_operand) /* OABI. */
13137 svc_number = svc_operand - 0x900000;
13138 else /* EABI. */
13139 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
13140
13141 return tdep->arm_syscall_record (reg_cache, svc_number);
13142 }
13143 else
13144 {
13145 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13146 return -1;
13147 }
13148 }
13149 else if (bits_24_25 == 0x02)
13150 {
13151 if (op)
13152 {
13153 if ((coproc & 0x0e) == 0x0a)
13154 {
13155 /* 8, 16, and 32-bit transfer */
13156 return arm_record_vdata_transfer_insn (arm_insn_r);
13157 }
13158 else
13159 {
13160 if (op1_ebit)
13161 {
13162 /* MRC, MRC2 */
13163 uint32_t record_buf[1];
13164
13165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
13166 if (record_buf[0] == 15)
13167 record_buf[0] = ARM_PS_REGNUM;
13168
13169 arm_insn_r->reg_rec_count = 1;
13170 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
13171 record_buf);
13172 return 0;
13173 }
13174 else
13175 {
13176 /* MCR, MCR2 */
13177 return -1;
13178 }
13179 }
13180 }
13181 else
13182 {
13183 if ((coproc & 0x0e) == 0x0a)
13184 {
13185 /* VFP data-processing instructions. */
13186 return arm_record_vfp_data_proc_insn (arm_insn_r);
13187 }
13188 else
13189 {
13190 /* CDP, CDP2 */
13191 return -1;
13192 }
13193 }
13194 }
13195 else
13196 {
13197 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
13198
13199 if (op1 == 5)
13200 {
13201 if ((coproc & 0x0e) != 0x0a)
13202 {
13203 /* MRRC, MRRC2 */
13204 return -1;
13205 }
13206 }
13207 else if (op1 == 4 || op1 == 5)
13208 {
13209 if ((coproc & 0x0e) == 0x0a)
13210 {
13211 /* 64-bit transfers between ARM core and extension */
13212 return -1;
13213 }
13214 else if (op1 == 4)
13215 {
13216 /* MCRR, MCRR2 */
13217 return -1;
13218 }
13219 }
13220 else if (op1 == 0 || op1 == 1)
13221 {
13222 /* UNDEFINED */
13223 return -1;
13224 }
13225 else
13226 {
13227 if ((coproc & 0x0e) == 0x0a)
13228 {
13229 /* Extension register load/store */
13230 }
13231 else
13232 {
13233 /* STC, STC2, LDC, LDC2 */
13234 }
13235 return -1;
13236 }
13237 }
13238
13239 return -1;
13240 }
13241
13242 /* Handling opcode 000 insns. */
13243
13244 static int
13245 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
13246 {
13247 uint32_t record_buf[8];
13248 uint32_t reg_src1 = 0;
13249
13250 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13251
13252 record_buf[0] = ARM_PS_REGNUM;
13253 record_buf[1] = reg_src1;
13254 thumb_insn_r->reg_rec_count = 2;
13255
13256 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13257
13258 return 0;
13259 }
13260
13261
13262 /* Handling opcode 001 insns. */
13263
13264 static int
13265 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
13266 {
13267 uint32_t record_buf[8];
13268 uint32_t reg_src1 = 0;
13269
13270 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13271
13272 record_buf[0] = ARM_PS_REGNUM;
13273 record_buf[1] = reg_src1;
13274 thumb_insn_r->reg_rec_count = 2;
13275
13276 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13277
13278 return 0;
13279 }
13280
13281 /* Handling opcode 010 insns. */
13282
13283 static int
13284 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
13285 {
13286 struct regcache *reg_cache = thumb_insn_r->regcache;
13287 uint32_t record_buf[8], record_buf_mem[8];
13288
13289 uint32_t reg_src1 = 0, reg_src2 = 0;
13290 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
13291
13292 ULONGEST u_regval[2] = {0};
13293
13294 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
13295
13296 if (bit (thumb_insn_r->arm_insn, 12))
13297 {
13298 /* Handle load/store register offset. */
13299 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
13300
13301 if (in_inclusive_range (opB, 4U, 7U))
13302 {
13303 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
13304 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
13305 record_buf[0] = reg_src1;
13306 thumb_insn_r->reg_rec_count = 1;
13307 }
13308 else if (in_inclusive_range (opB, 0U, 2U))
13309 {
13310 /* STR(2), STRB(2), STRH(2) . */
13311 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13312 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
13313 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
13314 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
13315 if (0 == opB)
13316 record_buf_mem[0] = 4; /* STR (2). */
13317 else if (2 == opB)
13318 record_buf_mem[0] = 1; /* STRB (2). */
13319 else if (1 == opB)
13320 record_buf_mem[0] = 2; /* STRH (2). */
13321 record_buf_mem[1] = u_regval[0] + u_regval[1];
13322 thumb_insn_r->mem_rec_count = 1;
13323 }
13324 }
13325 else if (bit (thumb_insn_r->arm_insn, 11))
13326 {
13327 /* Handle load from literal pool. */
13328 /* LDR(3). */
13329 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13330 record_buf[0] = reg_src1;
13331 thumb_insn_r->reg_rec_count = 1;
13332 }
13333 else if (opcode1)
13334 {
13335 /* Special data instructions and branch and exchange */
13336 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
13337 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
13338 if ((3 == opcode2) && (!opcode3))
13339 {
13340 /* Branch with exchange. */
13341 record_buf[0] = ARM_PS_REGNUM;
13342 thumb_insn_r->reg_rec_count = 1;
13343 }
13344 else
13345 {
13346 /* Format 8; special data processing insns. */
13347 record_buf[0] = ARM_PS_REGNUM;
13348 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13349 | bits (thumb_insn_r->arm_insn, 0, 2));
13350 thumb_insn_r->reg_rec_count = 2;
13351 }
13352 }
13353 else
13354 {
13355 /* Format 5; data processing insns. */
13356 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13357 if (bit (thumb_insn_r->arm_insn, 7))
13358 {
13359 reg_src1 = reg_src1 + 8;
13360 }
13361 record_buf[0] = ARM_PS_REGNUM;
13362 record_buf[1] = reg_src1;
13363 thumb_insn_r->reg_rec_count = 2;
13364 }
13365
13366 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13367 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13368 record_buf_mem);
13369
13370 return 0;
13371 }
13372
13373 /* Handling opcode 001 insns. */
13374
13375 static int
13376 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13377 {
13378 struct regcache *reg_cache = thumb_insn_r->regcache;
13379 uint32_t record_buf[8], record_buf_mem[8];
13380
13381 uint32_t reg_src1 = 0;
13382 uint32_t opcode = 0, immed_5 = 0;
13383
13384 ULONGEST u_regval = 0;
13385
13386 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13387
13388 if (opcode)
13389 {
13390 /* LDR(1). */
13391 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13392 record_buf[0] = reg_src1;
13393 thumb_insn_r->reg_rec_count = 1;
13394 }
13395 else
13396 {
13397 /* STR(1). */
13398 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13399 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13400 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13401 record_buf_mem[0] = 4;
13402 record_buf_mem[1] = u_regval + (immed_5 * 4);
13403 thumb_insn_r->mem_rec_count = 1;
13404 }
13405
13406 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13407 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13408 record_buf_mem);
13409
13410 return 0;
13411 }
13412
13413 /* Handling opcode 100 insns. */
13414
13415 static int
13416 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13417 {
13418 struct regcache *reg_cache = thumb_insn_r->regcache;
13419 uint32_t record_buf[8], record_buf_mem[8];
13420
13421 uint32_t reg_src1 = 0;
13422 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13423
13424 ULONGEST u_regval = 0;
13425
13426 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13427
13428 if (3 == opcode)
13429 {
13430 /* LDR(4). */
13431 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13432 record_buf[0] = reg_src1;
13433 thumb_insn_r->reg_rec_count = 1;
13434 }
13435 else if (1 == opcode)
13436 {
13437 /* LDRH(1). */
13438 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13439 record_buf[0] = reg_src1;
13440 thumb_insn_r->reg_rec_count = 1;
13441 }
13442 else if (2 == opcode)
13443 {
13444 /* STR(3). */
13445 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13446 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13447 record_buf_mem[0] = 4;
13448 record_buf_mem[1] = u_regval + (immed_8 * 4);
13449 thumb_insn_r->mem_rec_count = 1;
13450 }
13451 else if (0 == opcode)
13452 {
13453 /* STRH(1). */
13454 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13455 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13456 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13457 record_buf_mem[0] = 2;
13458 record_buf_mem[1] = u_regval + (immed_5 * 2);
13459 thumb_insn_r->mem_rec_count = 1;
13460 }
13461
13462 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13463 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13464 record_buf_mem);
13465
13466 return 0;
13467 }
13468
13469 /* Handling opcode 101 insns. */
13470
13471 static int
13472 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13473 {
13474 struct regcache *reg_cache = thumb_insn_r->regcache;
13475
13476 uint32_t opcode = 0;
13477 uint32_t register_bits = 0, register_count = 0;
13478 uint32_t index = 0, start_address = 0;
13479 uint32_t record_buf[24], record_buf_mem[48];
13480 uint32_t reg_src1;
13481
13482 ULONGEST u_regval = 0;
13483
13484 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13485
13486 if (opcode == 0 || opcode == 1)
13487 {
13488 /* ADR and ADD (SP plus immediate) */
13489
13490 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13491 record_buf[0] = reg_src1;
13492 thumb_insn_r->reg_rec_count = 1;
13493 }
13494 else
13495 {
13496 /* Miscellaneous 16-bit instructions */
13497 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13498
13499 switch (opcode2)
13500 {
13501 case 6:
13502 /* SETEND and CPS */
13503 break;
13504 case 0:
13505 /* ADD/SUB (SP plus immediate) */
13506 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13507 record_buf[0] = ARM_SP_REGNUM;
13508 thumb_insn_r->reg_rec_count = 1;
13509 break;
13510 case 1: /* fall through */
13511 case 3: /* fall through */
13512 case 9: /* fall through */
13513 case 11:
13514 /* CBNZ, CBZ */
13515 break;
13516 case 2:
13517 /* SXTH, SXTB, UXTH, UXTB */
13518 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13519 thumb_insn_r->reg_rec_count = 1;
13520 break;
13521 case 4: /* fall through */
13522 case 5:
13523 /* PUSH. */
13524 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13525 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13526 while (register_bits)
13527 {
13528 if (register_bits & 0x00000001)
13529 register_count++;
13530 register_bits = register_bits >> 1;
13531 }
13532 start_address = u_regval - \
13533 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
13534 thumb_insn_r->mem_rec_count = register_count;
13535 while (register_count)
13536 {
13537 record_buf_mem[(register_count * 2) - 1] = start_address;
13538 record_buf_mem[(register_count * 2) - 2] = 4;
13539 start_address = start_address + 4;
13540 register_count--;
13541 }
13542 record_buf[0] = ARM_SP_REGNUM;
13543 thumb_insn_r->reg_rec_count = 1;
13544 break;
13545 case 10:
13546 /* REV, REV16, REVSH */
13547 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13548 thumb_insn_r->reg_rec_count = 1;
13549 break;
13550 case 12: /* fall through */
13551 case 13:
13552 /* POP. */
13553 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13554 while (register_bits)
13555 {
13556 if (register_bits & 0x00000001)
13557 record_buf[index++] = register_count;
13558 register_bits = register_bits >> 1;
13559 register_count++;
13560 }
13561 record_buf[index++] = ARM_PS_REGNUM;
13562 record_buf[index++] = ARM_SP_REGNUM;
13563 thumb_insn_r->reg_rec_count = index;
13564 break;
13565 case 0xe:
13566 /* BKPT insn. */
13567 /* Handle enhanced software breakpoint insn, BKPT. */
13568 /* CPSR is changed to be executed in ARM state, disabling normal
13569 interrupts, entering abort mode. */
13570 /* According to high vector configuration PC is set. */
13571 /* User hits breakpoint and type reverse, in that case, we need to go back with
13572 previous CPSR and Program Counter. */
13573 record_buf[0] = ARM_PS_REGNUM;
13574 record_buf[1] = ARM_LR_REGNUM;
13575 thumb_insn_r->reg_rec_count = 2;
13576 /* We need to save SPSR value, which is not yet done. */
13577 gdb_printf (gdb_stderr,
13578 _("Process record does not support instruction "
13579 "0x%0x at address %s.\n"),
13580 thumb_insn_r->arm_insn,
13581 paddress (thumb_insn_r->gdbarch,
13582 thumb_insn_r->this_addr));
13583 return -1;
13584
13585 case 0xf:
13586 /* If-Then, and hints */
13587 break;
13588 default:
13589 return -1;
13590 };
13591 }
13592
13593 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13594 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13595 record_buf_mem);
13596
13597 return 0;
13598 }
13599
13600 /* Handling opcode 110 insns. */
13601
13602 static int
13603 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13604 {
13605 arm_gdbarch_tdep *tdep
13606 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch);
13607 struct regcache *reg_cache = thumb_insn_r->regcache;
13608
13609 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
13610 uint32_t reg_src1 = 0;
13611 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13612 uint32_t index = 0, start_address = 0;
13613 uint32_t record_buf[24], record_buf_mem[48];
13614
13615 ULONGEST u_regval = 0;
13616
13617 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13618 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13619
13620 if (1 == opcode2)
13621 {
13622
13623 /* LDMIA. */
13624 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13625 /* Get Rn. */
13626 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13627 while (register_bits)
13628 {
13629 if (register_bits & 0x00000001)
13630 record_buf[index++] = register_count;
13631 register_bits = register_bits >> 1;
13632 register_count++;
13633 }
13634 record_buf[index++] = reg_src1;
13635 thumb_insn_r->reg_rec_count = index;
13636 }
13637 else if (0 == opcode2)
13638 {
13639 /* It handles both STMIA. */
13640 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13641 /* Get Rn. */
13642 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13643 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13644 while (register_bits)
13645 {
13646 if (register_bits & 0x00000001)
13647 register_count++;
13648 register_bits = register_bits >> 1;
13649 }
13650 start_address = u_regval;
13651 thumb_insn_r->mem_rec_count = register_count;
13652 while (register_count)
13653 {
13654 record_buf_mem[(register_count * 2) - 1] = start_address;
13655 record_buf_mem[(register_count * 2) - 2] = 4;
13656 start_address = start_address + 4;
13657 register_count--;
13658 }
13659 }
13660 else if (0x1F == opcode1)
13661 {
13662 /* Handle arm syscall insn. */
13663 if (tdep->arm_syscall_record != NULL)
13664 {
13665 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13666 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13667 }
13668 else
13669 {
13670 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13671 return -1;
13672 }
13673 }
13674
13675 /* B (1), conditional branch is automatically taken care in process_record,
13676 as PC is saved there. */
13677
13678 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13679 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13680 record_buf_mem);
13681
13682 return ret;
13683 }
13684
13685 /* Handling opcode 111 insns. */
13686
13687 static int
13688 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13689 {
13690 uint32_t record_buf[8];
13691 uint32_t bits_h = 0;
13692
13693 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13694
13695 if (2 == bits_h || 3 == bits_h)
13696 {
13697 /* BL */
13698 record_buf[0] = ARM_LR_REGNUM;
13699 thumb_insn_r->reg_rec_count = 1;
13700 }
13701 else if (1 == bits_h)
13702 {
13703 /* BLX(1). */
13704 record_buf[0] = ARM_PS_REGNUM;
13705 record_buf[1] = ARM_LR_REGNUM;
13706 thumb_insn_r->reg_rec_count = 2;
13707 }
13708
13709 /* B(2) is automatically taken care in process_record, as PC is
13710 saved there. */
13711
13712 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13713
13714 return 0;
13715 }
13716
13717 /* Handler for thumb2 load/store multiple instructions. */
13718
13719 static int
13720 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13721 {
13722 struct regcache *reg_cache = thumb2_insn_r->regcache;
13723
13724 uint32_t reg_rn, op;
13725 uint32_t register_bits = 0, register_count = 0;
13726 uint32_t index = 0, start_address = 0;
13727 uint32_t record_buf[24], record_buf_mem[48];
13728
13729 ULONGEST u_regval = 0;
13730
13731 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13732 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13733
13734 if (0 == op || 3 == op)
13735 {
13736 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13737 {
13738 /* Handle RFE instruction. */
13739 record_buf[0] = ARM_PS_REGNUM;
13740 thumb2_insn_r->reg_rec_count = 1;
13741 }
13742 else
13743 {
13744 /* Handle SRS instruction after reading banked SP. */
13745 return arm_record_unsupported_insn (thumb2_insn_r);
13746 }
13747 }
13748 else if (1 == op || 2 == op)
13749 {
13750 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13751 {
13752 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13753 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13754 while (register_bits)
13755 {
13756 if (register_bits & 0x00000001)
13757 record_buf[index++] = register_count;
13758
13759 register_count++;
13760 register_bits = register_bits >> 1;
13761 }
13762 record_buf[index++] = reg_rn;
13763 record_buf[index++] = ARM_PS_REGNUM;
13764 thumb2_insn_r->reg_rec_count = index;
13765 }
13766 else
13767 {
13768 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13769 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13770 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13771 while (register_bits)
13772 {
13773 if (register_bits & 0x00000001)
13774 register_count++;
13775
13776 register_bits = register_bits >> 1;
13777 }
13778
13779 if (1 == op)
13780 {
13781 /* Start address calculation for LDMDB/LDMEA. */
13782 start_address = u_regval;
13783 }
13784 else if (2 == op)
13785 {
13786 /* Start address calculation for LDMDB/LDMEA. */
13787 start_address = u_regval - register_count * 4;
13788 }
13789
13790 thumb2_insn_r->mem_rec_count = register_count;
13791 while (register_count)
13792 {
13793 record_buf_mem[register_count * 2 - 1] = start_address;
13794 record_buf_mem[register_count * 2 - 2] = 4;
13795 start_address = start_address + 4;
13796 register_count--;
13797 }
13798 record_buf[0] = reg_rn;
13799 record_buf[1] = ARM_PS_REGNUM;
13800 thumb2_insn_r->reg_rec_count = 2;
13801 }
13802 }
13803
13804 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13805 record_buf_mem);
13806 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13807 record_buf);
13808 return ARM_RECORD_SUCCESS;
13809 }
13810
13811 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13812 instructions. */
13813
13814 static int
13815 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13816 {
13817 struct regcache *reg_cache = thumb2_insn_r->regcache;
13818
13819 uint32_t reg_rd, reg_rn, offset_imm;
13820 uint32_t reg_dest1, reg_dest2;
13821 uint32_t address, offset_addr;
13822 uint32_t record_buf[8], record_buf_mem[8];
13823 uint32_t op1, op2, op3;
13824
13825 ULONGEST u_regval[2];
13826
13827 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13828 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13829 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13830
13831 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13832 {
13833 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13834 {
13835 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13836 record_buf[0] = reg_dest1;
13837 record_buf[1] = ARM_PS_REGNUM;
13838 thumb2_insn_r->reg_rec_count = 2;
13839 }
13840
13841 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13842 {
13843 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13844 record_buf[2] = reg_dest2;
13845 thumb2_insn_r->reg_rec_count = 3;
13846 }
13847 }
13848 else
13849 {
13850 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13851 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13852
13853 if (0 == op1 && 0 == op2)
13854 {
13855 /* Handle STREX. */
13856 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13857 address = u_regval[0] + (offset_imm * 4);
13858 record_buf_mem[0] = 4;
13859 record_buf_mem[1] = address;
13860 thumb2_insn_r->mem_rec_count = 1;
13861 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13862 record_buf[0] = reg_rd;
13863 thumb2_insn_r->reg_rec_count = 1;
13864 }
13865 else if (1 == op1 && 0 == op2)
13866 {
13867 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13868 record_buf[0] = reg_rd;
13869 thumb2_insn_r->reg_rec_count = 1;
13870 address = u_regval[0];
13871 record_buf_mem[1] = address;
13872
13873 if (4 == op3)
13874 {
13875 /* Handle STREXB. */
13876 record_buf_mem[0] = 1;
13877 thumb2_insn_r->mem_rec_count = 1;
13878 }
13879 else if (5 == op3)
13880 {
13881 /* Handle STREXH. */
13882 record_buf_mem[0] = 2 ;
13883 thumb2_insn_r->mem_rec_count = 1;
13884 }
13885 else if (7 == op3)
13886 {
13887 /* Handle STREXD. */
13888 address = u_regval[0];
13889 record_buf_mem[0] = 4;
13890 record_buf_mem[2] = 4;
13891 record_buf_mem[3] = address + 4;
13892 thumb2_insn_r->mem_rec_count = 2;
13893 }
13894 }
13895 else
13896 {
13897 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13898
13899 if (bit (thumb2_insn_r->arm_insn, 24))
13900 {
13901 if (bit (thumb2_insn_r->arm_insn, 23))
13902 offset_addr = u_regval[0] + (offset_imm * 4);
13903 else
13904 offset_addr = u_regval[0] - (offset_imm * 4);
13905
13906 address = offset_addr;
13907 }
13908 else
13909 address = u_regval[0];
13910
13911 record_buf_mem[0] = 4;
13912 record_buf_mem[1] = address;
13913 record_buf_mem[2] = 4;
13914 record_buf_mem[3] = address + 4;
13915 thumb2_insn_r->mem_rec_count = 2;
13916 record_buf[0] = reg_rn;
13917 thumb2_insn_r->reg_rec_count = 1;
13918 }
13919 }
13920
13921 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13922 record_buf);
13923 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13924 record_buf_mem);
13925 return ARM_RECORD_SUCCESS;
13926 }
13927
13928 /* Handler for thumb2 data processing (shift register and modified immediate)
13929 instructions. */
13930
13931 static int
13932 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13933 {
13934 uint32_t reg_rd, op;
13935 uint32_t record_buf[8];
13936
13937 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13938 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13939
13940 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13941 {
13942 record_buf[0] = ARM_PS_REGNUM;
13943 thumb2_insn_r->reg_rec_count = 1;
13944 }
13945 else
13946 {
13947 record_buf[0] = reg_rd;
13948 record_buf[1] = ARM_PS_REGNUM;
13949 thumb2_insn_r->reg_rec_count = 2;
13950 }
13951
13952 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13953 record_buf);
13954 return ARM_RECORD_SUCCESS;
13955 }
13956
13957 /* Generic handler for thumb2 instructions which effect destination and PS
13958 registers. */
13959
13960 static int
13961 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13962 {
13963 uint32_t reg_rd;
13964 uint32_t record_buf[8];
13965
13966 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13967
13968 record_buf[0] = reg_rd;
13969 record_buf[1] = ARM_PS_REGNUM;
13970 thumb2_insn_r->reg_rec_count = 2;
13971
13972 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13973 record_buf);
13974 return ARM_RECORD_SUCCESS;
13975 }
13976
13977 /* Handler for thumb2 branch and miscellaneous control instructions. */
13978
13979 static int
13980 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
13981 {
13982 uint32_t op, op1, op2;
13983 uint32_t record_buf[8];
13984
13985 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13986 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13987 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13988
13989 /* Handle MSR insn. */
13990 if (!(op1 & 0x2) && 0x38 == op)
13991 {
13992 if (!(op2 & 0x3))
13993 {
13994 /* CPSR is going to be changed. */
13995 record_buf[0] = ARM_PS_REGNUM;
13996 thumb2_insn_r->reg_rec_count = 1;
13997 }
13998 else
13999 {
14000 arm_record_unsupported_insn(thumb2_insn_r);
14001 return -1;
14002 }
14003 }
14004 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
14005 {
14006 /* BLX. */
14007 record_buf[0] = ARM_PS_REGNUM;
14008 record_buf[1] = ARM_LR_REGNUM;
14009 thumb2_insn_r->reg_rec_count = 2;
14010 }
14011
14012 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14013 record_buf);
14014 return ARM_RECORD_SUCCESS;
14015 }
14016
14017 /* Handler for thumb2 store single data item instructions. */
14018
14019 static int
14020 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
14021 {
14022 struct regcache *reg_cache = thumb2_insn_r->regcache;
14023
14024 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
14025 uint32_t address, offset_addr;
14026 uint32_t record_buf[8], record_buf_mem[8];
14027 uint32_t op1, op2;
14028
14029 ULONGEST u_regval[2];
14030
14031 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
14032 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
14033 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14034 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
14035
14036 if (bit (thumb2_insn_r->arm_insn, 23))
14037 {
14038 /* T2 encoding. */
14039 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
14040 offset_addr = u_regval[0] + offset_imm;
14041 address = offset_addr;
14042 }
14043 else
14044 {
14045 /* T3 encoding. */
14046 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
14047 {
14048 /* Handle STRB (register). */
14049 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
14050 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
14051 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
14052 offset_addr = u_regval[1] << shift_imm;
14053 address = u_regval[0] + offset_addr;
14054 }
14055 else
14056 {
14057 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
14058 if (bit (thumb2_insn_r->arm_insn, 10))
14059 {
14060 if (bit (thumb2_insn_r->arm_insn, 9))
14061 offset_addr = u_regval[0] + offset_imm;
14062 else
14063 offset_addr = u_regval[0] - offset_imm;
14064
14065 address = offset_addr;
14066 }
14067 else
14068 address = u_regval[0];
14069 }
14070 }
14071
14072 switch (op1)
14073 {
14074 /* Store byte instructions. */
14075 case 4:
14076 case 0:
14077 record_buf_mem[0] = 1;
14078 break;
14079 /* Store half word instructions. */
14080 case 1:
14081 case 5:
14082 record_buf_mem[0] = 2;
14083 break;
14084 /* Store word instructions. */
14085 case 2:
14086 case 6:
14087 record_buf_mem[0] = 4;
14088 break;
14089
14090 default:
14091 gdb_assert_not_reached ("no decoding pattern found");
14092 break;
14093 }
14094
14095 record_buf_mem[1] = address;
14096 thumb2_insn_r->mem_rec_count = 1;
14097 record_buf[0] = reg_rn;
14098 thumb2_insn_r->reg_rec_count = 1;
14099
14100 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14101 record_buf);
14102 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14103 record_buf_mem);
14104 return ARM_RECORD_SUCCESS;
14105 }
14106
14107 /* Handler for thumb2 load memory hints instructions. */
14108
14109 static int
14110 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
14111 {
14112 uint32_t record_buf[8];
14113 uint32_t reg_rt, reg_rn;
14114
14115 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
14116 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14117
14118 if (ARM_PC_REGNUM != reg_rt)
14119 {
14120 record_buf[0] = reg_rt;
14121 record_buf[1] = reg_rn;
14122 record_buf[2] = ARM_PS_REGNUM;
14123 thumb2_insn_r->reg_rec_count = 3;
14124
14125 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14126 record_buf);
14127 return ARM_RECORD_SUCCESS;
14128 }
14129
14130 return ARM_RECORD_FAILURE;
14131 }
14132
14133 /* Handler for thumb2 load word instructions. */
14134
14135 static int
14136 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
14137 {
14138 uint32_t record_buf[8];
14139
14140 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
14141 record_buf[1] = ARM_PS_REGNUM;
14142 thumb2_insn_r->reg_rec_count = 2;
14143
14144 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14145 record_buf);
14146 return ARM_RECORD_SUCCESS;
14147 }
14148
14149 /* Handler for thumb2 long multiply, long multiply accumulate, and
14150 divide instructions. */
14151
14152 static int
14153 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
14154 {
14155 uint32_t opcode1 = 0, opcode2 = 0;
14156 uint32_t record_buf[8];
14157
14158 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
14159 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
14160
14161 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
14162 {
14163 /* Handle SMULL, UMULL, SMULAL. */
14164 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
14165 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14166 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14167 record_buf[2] = ARM_PS_REGNUM;
14168 thumb2_insn_r->reg_rec_count = 3;
14169 }
14170 else if (1 == opcode1 || 3 == opcode2)
14171 {
14172 /* Handle SDIV and UDIV. */
14173 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14174 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14175 record_buf[2] = ARM_PS_REGNUM;
14176 thumb2_insn_r->reg_rec_count = 3;
14177 }
14178 else
14179 return ARM_RECORD_FAILURE;
14180
14181 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14182 record_buf);
14183 return ARM_RECORD_SUCCESS;
14184 }
14185
14186 /* Record handler for thumb32 coprocessor instructions. */
14187
14188 static int
14189 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
14190 {
14191 if (bit (thumb2_insn_r->arm_insn, 25))
14192 return arm_record_coproc_data_proc (thumb2_insn_r);
14193 else
14194 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
14195 }
14196
14197 /* Record handler for advance SIMD structure load/store instructions. */
14198
14199 static int
14200 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
14201 {
14202 struct regcache *reg_cache = thumb2_insn_r->regcache;
14203 uint32_t l_bit, a_bit, b_bits;
14204 uint32_t record_buf[128], record_buf_mem[128];
14205 uint32_t reg_rn, reg_vd, address, f_elem;
14206 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
14207 uint8_t f_ebytes;
14208
14209 l_bit = bit (thumb2_insn_r->arm_insn, 21);
14210 a_bit = bit (thumb2_insn_r->arm_insn, 23);
14211 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
14212 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14213 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
14214 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
14215 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
14216 f_elem = 8 / f_ebytes;
14217
14218 if (!l_bit)
14219 {
14220 ULONGEST u_regval = 0;
14221 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
14222 address = u_regval;
14223
14224 if (!a_bit)
14225 {
14226 /* Handle VST1. */
14227 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14228 {
14229 if (b_bits == 0x07)
14230 bf_regs = 1;
14231 else if (b_bits == 0x0a)
14232 bf_regs = 2;
14233 else if (b_bits == 0x06)
14234 bf_regs = 3;
14235 else if (b_bits == 0x02)
14236 bf_regs = 4;
14237 else
14238 bf_regs = 0;
14239
14240 for (index_r = 0; index_r < bf_regs; index_r++)
14241 {
14242 for (index_e = 0; index_e < f_elem; index_e++)
14243 {
14244 record_buf_mem[index_m++] = f_ebytes;
14245 record_buf_mem[index_m++] = address;
14246 address = address + f_ebytes;
14247 thumb2_insn_r->mem_rec_count += 1;
14248 }
14249 }
14250 }
14251 /* Handle VST2. */
14252 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14253 {
14254 if (b_bits == 0x09 || b_bits == 0x08)
14255 bf_regs = 1;
14256 else if (b_bits == 0x03)
14257 bf_regs = 2;
14258 else
14259 bf_regs = 0;
14260
14261 for (index_r = 0; index_r < bf_regs; index_r++)
14262 for (index_e = 0; index_e < f_elem; index_e++)
14263 {
14264 for (loop_t = 0; loop_t < 2; loop_t++)
14265 {
14266 record_buf_mem[index_m++] = f_ebytes;
14267 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14268 thumb2_insn_r->mem_rec_count += 1;
14269 }
14270 address = address + (2 * f_ebytes);
14271 }
14272 }
14273 /* Handle VST3. */
14274 else if ((b_bits & 0x0e) == 0x04)
14275 {
14276 for (index_e = 0; index_e < f_elem; index_e++)
14277 {
14278 for (loop_t = 0; loop_t < 3; loop_t++)
14279 {
14280 record_buf_mem[index_m++] = f_ebytes;
14281 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14282 thumb2_insn_r->mem_rec_count += 1;
14283 }
14284 address = address + (3 * f_ebytes);
14285 }
14286 }
14287 /* Handle VST4. */
14288 else if (!(b_bits & 0x0e))
14289 {
14290 for (index_e = 0; index_e < f_elem; index_e++)
14291 {
14292 for (loop_t = 0; loop_t < 4; loop_t++)
14293 {
14294 record_buf_mem[index_m++] = f_ebytes;
14295 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14296 thumb2_insn_r->mem_rec_count += 1;
14297 }
14298 address = address + (4 * f_ebytes);
14299 }
14300 }
14301 }
14302 else
14303 {
14304 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
14305
14306 if (bft_size == 0x00)
14307 f_ebytes = 1;
14308 else if (bft_size == 0x01)
14309 f_ebytes = 2;
14310 else if (bft_size == 0x02)
14311 f_ebytes = 4;
14312 else
14313 f_ebytes = 0;
14314
14315 /* Handle VST1. */
14316 if (!(b_bits & 0x0b) || b_bits == 0x08)
14317 thumb2_insn_r->mem_rec_count = 1;
14318 /* Handle VST2. */
14319 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
14320 thumb2_insn_r->mem_rec_count = 2;
14321 /* Handle VST3. */
14322 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
14323 thumb2_insn_r->mem_rec_count = 3;
14324 /* Handle VST4. */
14325 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
14326 thumb2_insn_r->mem_rec_count = 4;
14327
14328 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
14329 {
14330 record_buf_mem[index_m] = f_ebytes;
14331 record_buf_mem[index_m] = address + (index_m * f_ebytes);
14332 }
14333 }
14334 }
14335 else
14336 {
14337 if (!a_bit)
14338 {
14339 /* Handle VLD1. */
14340 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14341 thumb2_insn_r->reg_rec_count = 1;
14342 /* Handle VLD2. */
14343 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14344 thumb2_insn_r->reg_rec_count = 2;
14345 /* Handle VLD3. */
14346 else if ((b_bits & 0x0e) == 0x04)
14347 thumb2_insn_r->reg_rec_count = 3;
14348 /* Handle VLD4. */
14349 else if (!(b_bits & 0x0e))
14350 thumb2_insn_r->reg_rec_count = 4;
14351 }
14352 else
14353 {
14354 /* Handle VLD1. */
14355 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14356 thumb2_insn_r->reg_rec_count = 1;
14357 /* Handle VLD2. */
14358 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14359 thumb2_insn_r->reg_rec_count = 2;
14360 /* Handle VLD3. */
14361 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14362 thumb2_insn_r->reg_rec_count = 3;
14363 /* Handle VLD4. */
14364 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14365 thumb2_insn_r->reg_rec_count = 4;
14366
14367 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14368 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14369 }
14370 }
14371
14372 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14373 {
14374 record_buf[index_r] = reg_rn;
14375 thumb2_insn_r->reg_rec_count += 1;
14376 }
14377
14378 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14379 record_buf);
14380 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14381 record_buf_mem);
14382 return 0;
14383 }
14384
14385 /* Decodes thumb2 instruction type and invokes its record handler. */
14386
14387 static unsigned int
14388 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14389 {
14390 uint32_t op, op1, op2;
14391
14392 op = bit (thumb2_insn_r->arm_insn, 15);
14393 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14394 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
14395
14396 if (op1 == 0x01)
14397 {
14398 if (!(op2 & 0x64 ))
14399 {
14400 /* Load/store multiple instruction. */
14401 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14402 }
14403 else if ((op2 & 0x64) == 0x4)
14404 {
14405 /* Load/store (dual/exclusive) and table branch instruction. */
14406 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14407 }
14408 else if ((op2 & 0x60) == 0x20)
14409 {
14410 /* Data-processing (shifted register). */
14411 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14412 }
14413 else if (op2 & 0x40)
14414 {
14415 /* Co-processor instructions. */
14416 return thumb2_record_coproc_insn (thumb2_insn_r);
14417 }
14418 }
14419 else if (op1 == 0x02)
14420 {
14421 if (op)
14422 {
14423 /* Branches and miscellaneous control instructions. */
14424 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14425 }
14426 else if (op2 & 0x20)
14427 {
14428 /* Data-processing (plain binary immediate) instruction. */
14429 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14430 }
14431 else
14432 {
14433 /* Data-processing (modified immediate). */
14434 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14435 }
14436 }
14437 else if (op1 == 0x03)
14438 {
14439 if (!(op2 & 0x71 ))
14440 {
14441 /* Store single data item. */
14442 return thumb2_record_str_single_data (thumb2_insn_r);
14443 }
14444 else if (!((op2 & 0x71) ^ 0x10))
14445 {
14446 /* Advanced SIMD or structure load/store instructions. */
14447 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14448 }
14449 else if (!((op2 & 0x67) ^ 0x01))
14450 {
14451 /* Load byte, memory hints instruction. */
14452 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14453 }
14454 else if (!((op2 & 0x67) ^ 0x03))
14455 {
14456 /* Load halfword, memory hints instruction. */
14457 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14458 }
14459 else if (!((op2 & 0x67) ^ 0x05))
14460 {
14461 /* Load word instruction. */
14462 return thumb2_record_ld_word (thumb2_insn_r);
14463 }
14464 else if (!((op2 & 0x70) ^ 0x20))
14465 {
14466 /* Data-processing (register) instruction. */
14467 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14468 }
14469 else if (!((op2 & 0x78) ^ 0x30))
14470 {
14471 /* Multiply, multiply accumulate, abs diff instruction. */
14472 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14473 }
14474 else if (!((op2 & 0x78) ^ 0x38))
14475 {
14476 /* Long multiply, long multiply accumulate, and divide. */
14477 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14478 }
14479 else if (op2 & 0x40)
14480 {
14481 /* Co-processor instructions. */
14482 return thumb2_record_coproc_insn (thumb2_insn_r);
14483 }
14484 }
14485
14486 return -1;
14487 }
14488
14489 namespace {
14490 /* Abstract instruction reader. */
14491
14492 class abstract_instruction_reader
14493 {
14494 public:
14495 /* Read one instruction of size LEN from address MEMADDR and using
14496 BYTE_ORDER endianness. */
14497
14498 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len,
14499 enum bfd_endian byte_order) = 0;
14500 };
14501
14502 /* Instruction reader from real target. */
14503
14504 class instruction_reader : public abstract_instruction_reader
14505 {
14506 public:
14507 ULONGEST read (CORE_ADDR memaddr, const size_t len,
14508 enum bfd_endian byte_order) override
14509 {
14510 return read_code_unsigned_integer (memaddr, len, byte_order);
14511 }
14512 };
14513
14514 } // namespace
14515
14516 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14517
14518 /* Decode arm/thumb insn depending on condition cods and opcodes; and
14519 dispatch it. */
14520
14521 static int
14522 decode_insn (abstract_instruction_reader &reader,
14523 arm_insn_decode_record *arm_record,
14524 record_type_t record_type, uint32_t insn_size)
14525 {
14526
14527 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
14528 instruction. */
14529 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14530 {
14531 arm_record_data_proc_misc_ld_str, /* 000. */
14532 arm_record_data_proc_imm, /* 001. */
14533 arm_record_ld_st_imm_offset, /* 010. */
14534 arm_record_ld_st_reg_offset, /* 011. */
14535 arm_record_ld_st_multiple, /* 100. */
14536 arm_record_b_bl, /* 101. */
14537 arm_record_asimd_vfp_coproc, /* 110. */
14538 arm_record_coproc_data_proc /* 111. */
14539 };
14540
14541 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
14542 instruction. */
14543 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14544 { \
14545 thumb_record_shift_add_sub, /* 000. */
14546 thumb_record_add_sub_cmp_mov, /* 001. */
14547 thumb_record_ld_st_reg_offset, /* 010. */
14548 thumb_record_ld_st_imm_offset, /* 011. */
14549 thumb_record_ld_st_stack, /* 100. */
14550 thumb_record_misc, /* 101. */
14551 thumb_record_ldm_stm_swi, /* 110. */
14552 thumb_record_branch /* 111. */
14553 };
14554
14555 uint32_t ret = 0; /* return value: negative:failure 0:success. */
14556 uint32_t insn_id = 0;
14557 enum bfd_endian code_endian
14558 = gdbarch_byte_order_for_code (arm_record->gdbarch);
14559 arm_record->arm_insn
14560 = reader.read (arm_record->this_addr, insn_size, code_endian);
14561
14562 if (ARM_RECORD == record_type)
14563 {
14564 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14565 insn_id = bits (arm_record->arm_insn, 25, 27);
14566
14567 if (arm_record->cond == 0xf)
14568 ret = arm_record_extension_space (arm_record);
14569 else
14570 {
14571 /* If this insn has fallen into extension space
14572 then we need not decode it anymore. */
14573 ret = arm_handle_insn[insn_id] (arm_record);
14574 }
14575 if (ret != ARM_RECORD_SUCCESS)
14576 {
14577 arm_record_unsupported_insn (arm_record);
14578 ret = -1;
14579 }
14580 }
14581 else if (THUMB_RECORD == record_type)
14582 {
14583 /* As thumb does not have condition codes, we set negative. */
14584 arm_record->cond = -1;
14585 insn_id = bits (arm_record->arm_insn, 13, 15);
14586 ret = thumb_handle_insn[insn_id] (arm_record);
14587 if (ret != ARM_RECORD_SUCCESS)
14588 {
14589 arm_record_unsupported_insn (arm_record);
14590 ret = -1;
14591 }
14592 }
14593 else if (THUMB2_RECORD == record_type)
14594 {
14595 /* As thumb does not have condition codes, we set negative. */
14596 arm_record->cond = -1;
14597
14598 /* Swap first half of 32bit thumb instruction with second half. */
14599 arm_record->arm_insn
14600 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
14601
14602 ret = thumb2_record_decode_insn_handler (arm_record);
14603
14604 if (ret != ARM_RECORD_SUCCESS)
14605 {
14606 arm_record_unsupported_insn (arm_record);
14607 ret = -1;
14608 }
14609 }
14610 else
14611 {
14612 /* Throw assertion. */
14613 gdb_assert_not_reached ("not a valid instruction, could not decode");
14614 }
14615
14616 return ret;
14617 }
14618
14619 #if GDB_SELF_TEST
14620 namespace selftests {
14621
14622 /* Instruction reader class for selftests.
14623
14624 For 16-bit Thumb instructions, an array of uint16_t should be used.
14625
14626 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array
14627 of uint32_t should be used. */
14628
14629 template<typename T>
14630 class instruction_reader_selftest : public abstract_instruction_reader
14631 {
14632 public:
14633 template<size_t SIZE>
14634 instruction_reader_selftest (const T (&insns)[SIZE])
14635 : m_insns (insns), m_insns_size (SIZE)
14636 {}
14637
14638 ULONGEST read (CORE_ADDR memaddr, const size_t length,
14639 enum bfd_endian byte_order) override
14640 {
14641 SELF_CHECK (length == sizeof (T));
14642 SELF_CHECK (memaddr % sizeof (T) == 0);
14643 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size);
14644
14645 return m_insns[memaddr / sizeof (T)];
14646 }
14647
14648 private:
14649 const T *m_insns;
14650 const size_t m_insns_size;
14651 };
14652
14653 static void
14654 arm_record_test (void)
14655 {
14656 struct gdbarch_info info;
14657 info.bfd_arch_info = bfd_scan_arch ("arm");
14658
14659 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14660
14661 SELF_CHECK (gdbarch != NULL);
14662
14663 /* 16-bit Thumb instructions. */
14664 {
14665 arm_insn_decode_record arm_record;
14666
14667 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14668 arm_record.gdbarch = gdbarch;
14669
14670 /* Use the endian-free representation of the instructions here. The test
14671 will handle endianness conversions. */
14672 static const uint16_t insns[] = {
14673 /* db b2 uxtb r3, r3 */
14674 0xb2db,
14675 /* cd 58 ldr r5, [r1, r3] */
14676 0x58cd,
14677 };
14678
14679 instruction_reader_selftest<uint16_t> reader (insns);
14680 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14681 THUMB_INSN_SIZE_BYTES);
14682
14683 SELF_CHECK (ret == 0);
14684 SELF_CHECK (arm_record.mem_rec_count == 0);
14685 SELF_CHECK (arm_record.reg_rec_count == 1);
14686 SELF_CHECK (arm_record.arm_regs[0] == 3);
14687
14688 arm_record.this_addr += 2;
14689 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14690 THUMB_INSN_SIZE_BYTES);
14691
14692 SELF_CHECK (ret == 0);
14693 SELF_CHECK (arm_record.mem_rec_count == 0);
14694 SELF_CHECK (arm_record.reg_rec_count == 1);
14695 SELF_CHECK (arm_record.arm_regs[0] == 5);
14696 }
14697
14698 /* 32-bit Thumb-2 instructions. */
14699 {
14700 arm_insn_decode_record arm_record;
14701
14702 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14703 arm_record.gdbarch = gdbarch;
14704
14705 /* Use the endian-free representation of the instruction here. The test
14706 will handle endianness conversions. */
14707 static const uint32_t insns[] = {
14708 /* mrc 15, 0, r7, cr13, cr0, {3} */
14709 0x7f70ee1d,
14710 };
14711
14712 instruction_reader_selftest<uint32_t> reader (insns);
14713 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14714 THUMB2_INSN_SIZE_BYTES);
14715
14716 SELF_CHECK (ret == 0);
14717 SELF_CHECK (arm_record.mem_rec_count == 0);
14718 SELF_CHECK (arm_record.reg_rec_count == 1);
14719 SELF_CHECK (arm_record.arm_regs[0] == 7);
14720 }
14721
14722 /* 32-bit instructions. */
14723 {
14724 arm_insn_decode_record arm_record;
14725
14726 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14727 arm_record.gdbarch = gdbarch;
14728
14729 /* Use the endian-free representation of the instruction here. The test
14730 will handle endianness conversions. */
14731 static const uint32_t insns[] = {
14732 /* mov r5, r0 */
14733 0xe1a05000,
14734 };
14735
14736 instruction_reader_selftest<uint32_t> reader (insns);
14737 int ret = decode_insn (reader, &arm_record, ARM_RECORD,
14738 ARM_INSN_SIZE_BYTES);
14739
14740 SELF_CHECK (ret == 0);
14741 }
14742 }
14743
14744 /* Instruction reader from manually cooked instruction sequences. */
14745
14746 class test_arm_instruction_reader : public arm_instruction_reader
14747 {
14748 public:
14749 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14750 : m_insns (insns)
14751 {}
14752
14753 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14754 {
14755 SELF_CHECK (memaddr % 4 == 0);
14756 SELF_CHECK (memaddr / 4 < m_insns.size ());
14757
14758 return m_insns[memaddr / 4];
14759 }
14760
14761 private:
14762 const gdb::array_view<const uint32_t> m_insns;
14763 };
14764
14765 static void
14766 arm_analyze_prologue_test ()
14767 {
14768 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14769 {
14770 struct gdbarch_info info;
14771 info.byte_order = endianness;
14772 info.byte_order_for_code = endianness;
14773 info.bfd_arch_info = bfd_scan_arch ("arm");
14774
14775 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14776
14777 SELF_CHECK (gdbarch != NULL);
14778
14779 /* The "sub" instruction contains an immediate value rotate count of 0,
14780 which resulted in a 32-bit shift of a 32-bit value, caught by
14781 UBSan. */
14782 const uint32_t insns[] = {
14783 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14784 0xe1a05000, /* mov r5, r0 */
14785 0xe5903020, /* ldr r3, [r0, #32] */
14786 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14787 };
14788
14789 test_arm_instruction_reader mem_reader (insns);
14790 arm_prologue_cache cache;
14791 arm_cache_init (&cache, gdbarch);
14792
14793 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14794 }
14795 }
14796
14797 } // namespace selftests
14798 #endif /* GDB_SELF_TEST */
14799
14800 /* Cleans up local record registers and memory allocations. */
14801
14802 static void
14803 deallocate_reg_mem (arm_insn_decode_record *record)
14804 {
14805 xfree (record->arm_regs);
14806 xfree (record->arm_mems);
14807 }
14808
14809
14810 /* Parse the current instruction and record the values of the registers and
14811 memory that will be changed in current instruction to record_arch_list".
14812 Return -1 if something is wrong. */
14813
14814 int
14815 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14816 CORE_ADDR insn_addr)
14817 {
14818
14819 uint32_t no_of_rec = 0;
14820 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
14821 ULONGEST t_bit = 0, insn_id = 0;
14822
14823 ULONGEST u_regval = 0;
14824
14825 arm_insn_decode_record arm_record;
14826
14827 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14828 arm_record.regcache = regcache;
14829 arm_record.this_addr = insn_addr;
14830 arm_record.gdbarch = gdbarch;
14831
14832
14833 if (record_debug > 1)
14834 {
14835 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14836 "addr = %s\n",
14837 paddress (gdbarch, arm_record.this_addr));
14838 }
14839
14840 instruction_reader reader;
14841 enum bfd_endian code_endian
14842 = gdbarch_byte_order_for_code (arm_record.gdbarch);
14843 arm_record.arm_insn
14844 = reader.read (arm_record.this_addr, 2, code_endian);
14845
14846 /* Check the insn, whether it is thumb or arm one. */
14847
14848 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14849 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14850
14851
14852 if (!(u_regval & t_bit))
14853 {
14854 /* We are decoding arm insn. */
14855 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14856 }
14857 else
14858 {
14859 insn_id = bits (arm_record.arm_insn, 11, 15);
14860 /* is it thumb2 insn? */
14861 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14862 {
14863 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14864 THUMB2_INSN_SIZE_BYTES);
14865 }
14866 else
14867 {
14868 /* We are decoding thumb insn. */
14869 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14870 THUMB_INSN_SIZE_BYTES);
14871 }
14872 }
14873
14874 if (0 == ret)
14875 {
14876 /* Record registers. */
14877 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14878 if (arm_record.arm_regs)
14879 {
14880 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14881 {
14882 if (record_full_arch_list_add_reg
14883 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14884 ret = -1;
14885 }
14886 }
14887 /* Record memories. */
14888 if (arm_record.arm_mems)
14889 {
14890 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14891 {
14892 if (record_full_arch_list_add_mem
14893 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14894 arm_record.arm_mems[no_of_rec].len))
14895 ret = -1;
14896 }
14897 }
14898
14899 if (record_full_arch_list_add_end ())
14900 ret = -1;
14901 }
14902
14903
14904 deallocate_reg_mem (&arm_record);
14905
14906 return ret;
14907 }
14908
14909 /* See arm-tdep.h. */
14910
14911 const target_desc *
14912 arm_read_description (arm_fp_type fp_type, bool tls)
14913 {
14914 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14915
14916 if (tdesc == nullptr)
14917 {
14918 tdesc = arm_create_target_description (fp_type, tls);
14919 tdesc_arm_list[fp_type][tls] = tdesc;
14920 }
14921
14922 return tdesc;
14923 }
14924
14925 /* See arm-tdep.h. */
14926
14927 const target_desc *
14928 arm_read_mprofile_description (arm_m_profile_type m_type)
14929 {
14930 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14931
14932 if (tdesc == nullptr)
14933 {
14934 tdesc = arm_create_mprofile_target_description (m_type);
14935 tdesc_arm_mprofile_list[m_type] = tdesc;
14936 }
14937
14938 return tdesc;
14939 }