1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2.h"
42 #include "dwarf2/frame.h"
43 #include "gdbtypes.h"
44 #include "prologue-value.h"
45 #include "remote.h"
46 #include "target-descriptions.h"
47 #include "user-regs.h"
48 #include "observable.h"
49 #include "count-one-bits.h"
50
51 #include "arch/arm.h"
52 #include "arch/arm-get-next-pcs.h"
53 #include "arm-tdep.h"
54 #include "sim/sim-arm.h"
55
56 #include "elf-bfd.h"
57 #include "coff/internal.h"
58 #include "elf/arm.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "producer.h"
65
66 #if GDB_SELF_TEST
67 #include "gdbsupport/selftest.h"
68 #endif
69
70 static bool arm_debug;
71
72 /* Print an "arm" debug statement. */
73
74 #define arm_debug_printf(fmt, ...) \
75 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
76
77 /* Macros for setting and testing a bit in a minimal symbol that marks
78 it as a Thumb function. The MSB of the minimal symbol's "info" field
79 is used for this purpose.
80
81 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
82 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
83
84 #define MSYMBOL_SET_SPECIAL(msym) \
85 (msym)->set_target_flag_1 (true)
86
87 #define MSYMBOL_IS_SPECIAL(msym) \
88 (msym)->target_flag_1 ()
89
90 struct arm_mapping_symbol
91 {
92 CORE_ADDR value;
93 char type;
94
95 bool operator< (const arm_mapping_symbol &other) const
96 { return this->value < other.value; }
97 };
98
99 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
100
101 struct arm_per_bfd
102 {
103 explicit arm_per_bfd (size_t num_sections)
104 : section_maps (new arm_mapping_symbol_vec[num_sections]),
105 section_maps_sorted (new bool[num_sections] ())
106 {}
107
108 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
109
110 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111
112 The format is an array of vectors of arm_mapping_symbols: there is one
113 vector for each section of the objfile (the array is indexed by BFD
114 section index).
115
116 For each section, the vector of arm_mapping_symbol is sorted by
117 symbol value (address). */
118 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
119
120 /* For each corresponding element of section_maps above, indicates
121 whether that vector has been sorted. */
122 std::unique_ptr<bool[]> section_maps_sorted;
123 };
124
125 /* Per-bfd data used for mapping symbols. */
126 static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key;
127
128 /* The list of available "set arm ..." and "show arm ..." commands. */
129 static struct cmd_list_element *setarmcmdlist = NULL;
130 static struct cmd_list_element *showarmcmdlist = NULL;
131
132 /* The type of floating-point to use. Keep this in sync with enum
133 arm_float_model, and the help string in _initialize_arm_tdep. */
134 static const char *const fp_model_strings[] =
135 {
136 "auto",
137 "softfpa",
138 "fpa",
139 "softvfp",
140 "vfp",
141 NULL
142 };
143
144 /* A variable that can be configured by the user. */
145 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
146 static const char *current_fp_model = "auto";
147
148 /* The ABI to use. Keep this in sync with arm_abi_kind. */
149 static const char *const arm_abi_strings[] =
150 {
151 "auto",
152 "APCS",
153 "AAPCS",
154 NULL
155 };
156
157 /* A variable that can be configured by the user. */
158 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
159 static const char *arm_abi_string = "auto";
160
161 /* The execution mode to assume. */
162 static const char *const arm_mode_strings[] =
163 {
164 "auto",
165 "arm",
166 "thumb",
167 NULL
168 };
169
170 static const char *arm_fallback_mode_string = "auto";
171 static const char *arm_force_mode_string = "auto";
172
173 /* The standard register names, and all the valid aliases for them. Note
174 that `fp', `sp' and `pc' are not added in this alias list, because they
175 have been added as builtin user registers in
176 std-regs.c:_initialize_frame_reg. */
177 static const struct
178 {
179 const char *name;
180 int regnum;
181 } arm_register_aliases[] = {
182 /* Basic register numbers. */
183 { "r0", 0 },
184 { "r1", 1 },
185 { "r2", 2 },
186 { "r3", 3 },
187 { "r4", 4 },
188 { "r5", 5 },
189 { "r6", 6 },
190 { "r7", 7 },
191 { "r8", 8 },
192 { "r9", 9 },
193 { "r10", 10 },
194 { "r11", 11 },
195 { "r12", 12 },
196 { "r13", 13 },
197 { "r14", 14 },
198 { "r15", 15 },
199 /* Synonyms (argument and variable registers). */
200 { "a1", 0 },
201 { "a2", 1 },
202 { "a3", 2 },
203 { "a4", 3 },
204 { "v1", 4 },
205 { "v2", 5 },
206 { "v3", 6 },
207 { "v4", 7 },
208 { "v5", 8 },
209 { "v6", 9 },
210 { "v7", 10 },
211 { "v8", 11 },
212 /* Other platform-specific names for r9. */
213 { "sb", 9 },
214 { "tr", 9 },
215 /* Special names. */
216 { "ip", 12 },
217 { "lr", 14 },
218 /* Names used by GCC (not listed in the ARM EABI). */
219 { "sl", 10 },
220 /* A special name from the older ATPCS. */
221 { "wr", 7 },
222 };
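/* For example, with the aliases above "print $a1" and "print $r0" refer
   to the same register, and "$sb" is simply another name for r9. */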
223
224 static const char *const arm_register_names[] =
225 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
226 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
227 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
228 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
229 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
230 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
231 "fps", "cpsr" }; /* 24 25 */
232
233 /* Holds the current set of options to be passed to the disassembler. */
234 static char *arm_disassembler_options;
235
236 /* Valid register name styles. */
237 static const char **valid_disassembly_styles;
238
239 /* Disassembly style to use. Default to "std" register names. */
240 static const char *disassembly_style;
241
242 /* All possible arm target descriptors. */
243 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
244 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
245
246 /* This is used to keep the bfd arch_info in sync with the disassembly
247 style. */
248 static void set_disassembly_style_sfunc (const char *, int,
249 struct cmd_list_element *);
250 static void show_disassembly_style_sfunc (struct ui_file *, int,
251 struct cmd_list_element *,
252 const char *);
253
254 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
255 readable_regcache *regcache,
256 int regnum, gdb_byte *buf);
257 static void arm_neon_quad_write (struct gdbarch *gdbarch,
258 struct regcache *regcache,
259 int regnum, const gdb_byte *buf);
260
261 static CORE_ADDR
262 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
263
264
265 /* get_next_pcs operations. */
266 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
267 arm_get_next_pcs_read_memory_unsigned_integer,
268 arm_get_next_pcs_syscall_next_pc,
269 arm_get_next_pcs_addr_bits_remove,
270 arm_get_next_pcs_is_thumb,
271 NULL,
272 };
273
274 struct arm_prologue_cache
275 {
276 /* The stack pointer at the time this frame was created; i.e. the
277 caller's stack pointer when this function was called. It is used
278 to identify this frame. */
279 CORE_ADDR sp;
280
281 /* Additional stack pointers used by M-profile with Security extension. */
282 /* Use msp_s / psp_s to hold the values of msp / psp when there is
283 no Security extension. */
284 CORE_ADDR msp_s;
285 CORE_ADDR msp_ns;
286 CORE_ADDR psp_s;
287 CORE_ADDR psp_ns;
288
289 /* Active stack pointer. */
290 int active_sp_regnum;
291 int active_msp_regnum;
292 int active_psp_regnum;
293
294 /* The frame base for this frame is just prev_sp - frame size.
295 FRAMESIZE is the distance from the frame pointer to the
296 initial stack pointer. */
297
298 int framesize;
299
300 /* The register used to hold the frame pointer for this frame. */
301 int framereg;
302
303 /* True if the return address is signed, false otherwise. */
304 gdb::optional<bool> ra_signed_state;
305
306 /* Saved register offsets. */
307 trad_frame_saved_reg *saved_regs;
308
309 arm_prologue_cache() = default;
310 };
311
312
313 /* Reconstruct T bit in program status register from LR value. */
314
315 static inline ULONGEST
316 reconstruct_t_bit (struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
317 {
318 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
319 if (IS_THUMB_ADDR (lr))
320 psr |= t_bit;
321 else
322 psr &= ~t_bit;
323
324 return psr;
325 }
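/* For example, if the saved LR value is 0x8001 (low bit set), the Thumb
   bit is set in the returned PSR value; for an LR of 0x8000 it is
   cleared. */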
326
327 /* Initialize CACHE fields for which zero is not adequate (CACHE is
328 expected to have been ZALLOC'ed before calling this function). */
329
330 static void
331 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
332 {
333 cache->active_sp_regnum = ARM_SP_REGNUM;
334
335 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
336 }
337
338 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
339
340 static void
341 arm_cache_init (struct arm_prologue_cache *cache, frame_info_ptr frame)
342 {
343 struct gdbarch *gdbarch = get_frame_arch (frame);
344 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
345
346 arm_cache_init (cache, gdbarch);
347 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
348
349 if (tdep->have_sec_ext)
350 {
351 const CORE_ADDR msp_val
352 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
353 const CORE_ADDR psp_val
354 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
355
356 cache->msp_s
357 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum);
358 cache->msp_ns
359 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum);
360 cache->psp_s
361 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum);
362 cache->psp_ns
363 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum);
364
365 /* Identify what msp is an alias for (msp_s or msp_ns). */
366 if (msp_val == cache->msp_s)
367 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
368 else if (msp_val == cache->msp_ns)
369 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
370 else
371 {
372 warning (_("Invalid state, unable to determine msp alias, assuming "
373 "msp_s."));
374 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
375 }
376
377 /* Identify what psp is an alias for (psp_s or psp_ns). */
378 if (psp_val == cache->psp_s)
379 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
380 else if (psp_val == cache->psp_ns)
381 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
382 else
383 {
384 warning (_("Invalid state, unable to determine psp alias, assuming "
385 "psp_s."));
386 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
387 }
388
389 /* Identify what sp is an alias for (msp_s, msp_ns, psp_s or psp_ns). */
390 if (msp_val == cache->sp)
391 cache->active_sp_regnum = cache->active_msp_regnum;
392 else if (psp_val == cache->sp)
393 cache->active_sp_regnum = cache->active_psp_regnum;
394 else
395 {
396 warning (_("Invalid state, unable to determine sp alias, assuming "
397 "msp."));
398 cache->active_sp_regnum = cache->active_msp_regnum;
399 }
400 }
401 else if (tdep->is_m)
402 {
403 cache->msp_s
404 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
405 cache->psp_s
406 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
407
408 /* Identify what sp is an alias for (msp or psp). */
409 if (cache->msp_s == cache->sp)
410 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
411 else if (cache->psp_s == cache->sp)
412 cache->active_sp_regnum = tdep->m_profile_psp_regnum;
413 else
414 {
415 warning (_("Invalid state, unable to determine sp alias, assuming "
416 "msp."));
417 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
418 }
419 }
420 else
421 {
422 cache->msp_s
423 = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
424
425 cache->active_sp_regnum = ARM_SP_REGNUM;
426 }
427 }
428
429 /* Return the requested stack pointer value (in REGNUM), taking into
430 account whether we have a Security extension or an M-profile
431 CPU. */
432
433 static CORE_ADDR
434 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
435 arm_gdbarch_tdep *tdep, int regnum)
436 {
437 if (tdep->have_sec_ext)
438 {
439 if (regnum == tdep->m_profile_msp_s_regnum)
440 return cache->msp_s;
441 if (regnum == tdep->m_profile_msp_ns_regnum)
442 return cache->msp_ns;
443 if (regnum == tdep->m_profile_psp_s_regnum)
444 return cache->psp_s;
445 if (regnum == tdep->m_profile_psp_ns_regnum)
446 return cache->psp_ns;
447 if (regnum == tdep->m_profile_msp_regnum)
448 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
449 if (regnum == tdep->m_profile_psp_regnum)
450 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
451 if (regnum == ARM_SP_REGNUM)
452 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
453 }
454 else if (tdep->is_m)
455 {
456 if (regnum == tdep->m_profile_msp_regnum)
457 return cache->msp_s;
458 if (regnum == tdep->m_profile_psp_regnum)
459 return cache->psp_s;
460 if (regnum == ARM_SP_REGNUM)
461 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
462 }
463 else if (regnum == ARM_SP_REGNUM)
464 return cache->sp;
465
466 gdb_assert_not_reached ("Invalid SP selection");
467 }
468
469 /* Return the previous stack address, depending on which SP register
470 is active. */
471
472 static CORE_ADDR
473 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
474 {
475 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
476 return val;
477 }
478
479 /* Set the active stack pointer to VAL. */
480
481 static void
482 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
483 arm_gdbarch_tdep *tdep, CORE_ADDR val)
484 {
485 if (tdep->have_sec_ext)
486 {
487 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
488 cache->msp_s = val;
489 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
490 cache->msp_ns = val;
491 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
492 cache->psp_s = val;
493 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
494 cache->psp_ns = val;
495
496 return;
497 }
498 else if (tdep->is_m)
499 {
500 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
501 cache->msp_s = val;
502 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
503 cache->psp_s = val;
504
505 return;
506 }
507 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
508 {
509 cache->sp = val;
510 return;
511 }
512
513 gdb_assert_not_reached ("Invalid SP selection");
514 }
515
516 /* Return true if REGNUM is one of the alternative stack pointers. */
517
518 static bool
519 arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum)
520 {
521 if ((regnum == tdep->m_profile_msp_regnum)
522 || (regnum == tdep->m_profile_msp_s_regnum)
523 || (regnum == tdep->m_profile_msp_ns_regnum)
524 || (regnum == tdep->m_profile_psp_regnum)
525 || (regnum == tdep->m_profile_psp_s_regnum)
526 || (regnum == tdep->m_profile_psp_ns_regnum))
527 return true;
528 else
529 return false;
530 }
531
532 /* Set the active stack pointer register to SP_REGNUM. */
533
534 static void
535 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
536 arm_gdbarch_tdep *tdep, int sp_regnum)
537 {
538 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum));
539
540 if (tdep->have_sec_ext)
541 {
542 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
543 && sp_regnum != tdep->m_profile_psp_regnum);
544
545 if (sp_regnum == tdep->m_profile_msp_s_regnum
546 || sp_regnum == tdep->m_profile_psp_s_regnum)
547 {
548 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
549 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
550 }
551 else if (sp_regnum == tdep->m_profile_msp_ns_regnum
552 || sp_regnum == tdep->m_profile_psp_ns_regnum)
553 {
554 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
555 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
556 }
557 }
558
559 cache->active_sp_regnum = sp_regnum;
560 }
561
562 namespace {
563
564 /* Abstract class to read ARM instructions from memory. */
565
566 class arm_instruction_reader
567 {
568 public:
569 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
570 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
571 };
572
573 /* Read instructions from target memory. */
574
575 class target_arm_instruction_reader : public arm_instruction_reader
576 {
577 public:
578 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
579 {
580 return read_code_unsigned_integer (memaddr, 4, byte_order);
581 }
582 };
583
584 } /* namespace */
585
586 static CORE_ADDR arm_analyze_prologue
587 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
588 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
589
590 /* Architecture version for displaced stepping. This affects the behaviour of
591 certain instructions, and really should not be hard-wired. */
592
593 #define DISPLACED_STEPPING_ARCH_VERSION 5
594
595 /* See arm-tdep.h. */
596
597 bool arm_apcs_32 = true;
598 bool arm_unwind_secure_frames = true;
599
600 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
601
602 int
603 arm_psr_thumb_bit (struct gdbarch *gdbarch)
604 {
605 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
606
607 if (tdep->is_m)
608 return XPSR_T;
609 else
610 return CPSR_T;
611 }
612
613 /* Determine if the processor is currently executing in Thumb mode. */
614
615 int
616 arm_is_thumb (struct regcache *regcache)
617 {
618 ULONGEST cpsr;
619 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
620
621 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
622
623 return (cpsr & t_bit) != 0;
624 }
625
626 /* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM
627 frame. */
628
629 int
630 arm_frame_is_thumb (frame_info_ptr frame)
631 {
632 /* Check the architecture of FRAME. */
633 struct gdbarch *gdbarch = get_frame_arch (frame);
634 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm);
635
636 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
637 directly (from a signal frame or dummy frame) or by interpreting
638 the saved LR (from a prologue or DWARF frame). So consult it and
639 trust the unwinders. */
640 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
641
642 /* Find and extract the thumb bit. */
643 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
644 return (cpsr & t_bit) != 0;
645 }
646
647 /* Search for the mapping symbol covering MEMADDR. If one is found,
648 return its type. Otherwise, return 0. If START is non-NULL,
649 set *START to the location of the mapping symbol. */
650
651 static char
652 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
653 {
654 struct obj_section *sec;
655
656 /* If there are mapping symbols, consult them. */
657 sec = find_pc_section (memaddr);
658 if (sec != NULL)
659 {
660 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ());
661 if (data != NULL)
662 {
663 unsigned int section_idx = sec->the_bfd_section->index;
664 arm_mapping_symbol_vec &map
665 = data->section_maps[section_idx];
666
667 /* Sort the vector on first use. */
668 if (!data->section_maps_sorted[section_idx])
669 {
670 std::sort (map.begin (), map.end ());
671 data->section_maps_sorted[section_idx] = true;
672 }
673
674 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
675 arm_mapping_symbol_vec::const_iterator it
676 = std::lower_bound (map.begin (), map.end (), map_key);
677
678 /* std::lower_bound finds the earliest ordered insertion
679 point. If the symbol at this position starts at this exact
680 address, we use that; otherwise, the preceding
681 mapping symbol covers this address. */
682 if (it < map.end ())
683 {
684 if (it->value == map_key.value)
685 {
686 if (start)
687 *start = it->value + sec->addr ();
688 return it->type;
689 }
690 }
691
692 if (it > map.begin ())
693 {
694 arm_mapping_symbol_vec::const_iterator prev_it
695 = it - 1;
696
697 if (start)
698 *start = prev_it->value + sec->addr ();
699 return prev_it->type;
700 }
701 }
702 }
703
704 return 0;
705 }
706
707 /* Determine if the program counter specified in MEMADDR is in a Thumb
708 function. This function should be called for addresses unrelated to
709 any executing frame; otherwise, prefer arm_frame_is_thumb. */
710
711 int
712 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
713 {
714 struct bound_minimal_symbol sym;
715 char type;
716 arm_displaced_step_copy_insn_closure *dsc = nullptr;
717 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
718
719 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
720 dsc = ((arm_displaced_step_copy_insn_closure * )
721 gdbarch_displaced_step_copy_insn_closure_by_addr
722 (gdbarch, current_inferior (), memaddr));
723
724 /* If we are checking the mode of a displaced instruction in the copy area,
725 the mode should be determined by the instruction at the original address. */
726 if (dsc)
727 {
728 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
729 (unsigned long) dsc->insn_addr,
730 (unsigned long) memaddr);
731 memaddr = dsc->insn_addr;
732 }
733
734 /* If bit 0 of the address is set, assume this is a Thumb address. */
735 if (IS_THUMB_ADDR (memaddr))
736 return 1;
737
738 /* If the user wants to override the symbol table, let them. */
739 if (strcmp (arm_force_mode_string, "arm") == 0)
740 return 0;
741 if (strcmp (arm_force_mode_string, "thumb") == 0)
742 return 1;
743
744 /* ARM v6-M and v7-M are always in Thumb mode. */
745 if (tdep->is_m)
746 return 1;
747
748 /* If there are mapping symbols, consult them. */
749 type = arm_find_mapping_symbol (memaddr, NULL);
750 if (type)
751 return type == 't';
752
753 /* Thumb functions have a "special" bit set in minimal symbols. */
754 sym = lookup_minimal_symbol_by_pc (memaddr);
755 if (sym.minsym)
756 return (MSYMBOL_IS_SPECIAL (sym.minsym));
757
758 /* If the user wants to override the fallback mode, let them. */
759 if (strcmp (arm_fallback_mode_string, "arm") == 0)
760 return 0;
761 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
762 return 1;
763
764 /* If we couldn't find any symbol, but we're talking to a running
765 target, then trust the current value of $cpsr. This lets
766 "display/i $pc" always show the correct mode (though if there is
767 a symbol table we will not reach here, so it still may not be
768 displayed in the mode in which it will be executed). */
769 if (target_has_registers ())
770 return arm_frame_is_thumb (get_current_frame ());
771
772 /* Otherwise we're out of luck; we assume ARM. */
773 return 0;
774 }
775
776 static inline bool
777 arm_m_addr_is_lockup (CORE_ADDR addr)
778 {
779 switch (addr)
780 {
781 /* Values for lockup state.
782 For more details see "B1.5.15 Unrecoverable exception cases" in
783 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or
784 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */
785 case 0xeffffffe:
786 case 0xfffffffe:
787 case 0xffffffff:
788 return true;
789
790 default:
791 /* Address is not lockup. */
792 return false;
793 }
794 }
795
796 /* Determine if the address specified equals any of these magic return
797 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
798 architectures. Also include lockup magic PC value.
799 Check also for FNC_RETURN if we have the v8-M security extension.
800
801 From ARMv6-M Reference Manual B1.5.8
802 Table B1-5 Exception return behavior
803
804 EXC_RETURN Return To Return Stack
805 0xFFFFFFF1 Handler mode Main
806 0xFFFFFFF9 Thread mode Main
807 0xFFFFFFFD Thread mode Process
808
809 From ARMv7-M Reference Manual B1.5.8
810 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
811
812 EXC_RETURN Return To Return Stack
813 0xFFFFFFF1 Handler mode Main
814 0xFFFFFFF9 Thread mode Main
815 0xFFFFFFFD Thread mode Process
816
817 Table B1-9 EXC_RETURN definition of exception return behavior, with
818 FP
819
820 EXC_RETURN Return To Return Stack Frame Type
821 0xFFFFFFE1 Handler mode Main Extended
822 0xFFFFFFE9 Thread mode Main Extended
823 0xFFFFFFED Thread mode Process Extended
824 0xFFFFFFF1 Handler mode Main Basic
825 0xFFFFFFF9 Thread mode Main Basic
826 0xFFFFFFFD Thread mode Process Basic
827
828 For more details see "B1.5.8 Exception return behavior"
829 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
830
831 From ARMv8-M Architecture Technical Reference, D1.2.95
832 FType, Mode and SPSEL bits are to be considered when the Security
833 Extension is not implemented.
834
835 EXC_RETURN Return To Return Stack Frame Type
836 0xFFFFFFA0 Handler mode Main Extended
837 0xFFFFFFA8 Thread mode Main Extended
838 0xFFFFFFAC Thread mode Process Extended
839 0xFFFFFFB0 Handler mode Main Standard
840 0xFFFFFFB8 Thread mode Main Standard
841 0xFFFFFFBC Thread mode Process Standard */
842
843 static int
844 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
845 {
846 if (arm_m_addr_is_lockup (addr))
847 return 1;
848
849 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
850 if (tdep->have_sec_ext)
851 {
852 switch ((addr & 0xff000000))
853 {
854 case 0xff000000: /* EXC_RETURN pattern. */
855 case 0xfe000000: /* FNC_RETURN pattern. */
856 return 1;
857 default:
858 return 0;
859 }
860 }
861 else
862 {
863 switch (addr)
864 {
865 /* Values from ARMv8-M Architecture Technical Reference. */
866 case 0xffffffa0:
867 case 0xffffffa8:
868 case 0xffffffac:
869 case 0xffffffb0:
870 case 0xffffffb8:
871 case 0xffffffbc:
872 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
873 the exception return behavior. */
874 case 0xffffffe1:
875 case 0xffffffe9:
876 case 0xffffffed:
877 case 0xfffffff1:
878 case 0xfffffff9:
879 case 0xfffffffd:
880 /* Address is magic. */
881 return 1;
882
883 default:
884 /* Address is not magic. */
885 return 0;
886 }
887 }
888 }
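/* For example, on targets with the Security extension any address whose
   top byte is 0xff (the EXC_RETURN pattern) or 0xfe (the FNC_RETURN
   pattern) is treated as magic, e.g. 0xfffffffd or 0xfeffffff; without
   the extension only the EXC_RETURN values listed explicitly above are
   recognized. */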
889
890 /* Remove useless bits from addresses in a running program. */
891 static CORE_ADDR
892 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
893 {
894 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
895
896 /* On M-profile devices, do not strip the low bit from EXC_RETURN
897 (the magic exception return address). */
898 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
899 return val;
900
901 if (arm_apcs_32)
902 return UNMAKE_THUMB_ADDR (val);
903 else
904 return (val & 0x03fffffc);
905 }
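/* For example, with arm_apcs_32 set (the default) a Thumb address such as
   0x8001 has its low bit cleared, giving 0x8000, while on M-profile
   targets a magic value such as the EXC_RETURN address 0xfffffffd is
   returned unchanged. */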
906
907 /* Return 1 if PC is the start of a compiler helper function which
908 can be safely ignored during prologue skipping. IS_THUMB is true
909 if the function is known to be a Thumb function due to the way it
910 is being called. */
911 static int
912 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
913 {
914 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
915 struct bound_minimal_symbol msym;
916
917 msym = lookup_minimal_symbol_by_pc (pc);
918 if (msym.minsym != NULL
919 && msym.value_address () == pc
920 && msym.minsym->linkage_name () != NULL)
921 {
922 const char *name = msym.minsym->linkage_name ();
923
924 /* The GNU linker's Thumb call stub to foo is named
925 __foo_from_thumb. */
926 if (strstr (name, "_from_thumb") != NULL)
927 name += 2;
928
929 /* On soft-float targets, __truncdfsf2 is called to convert promoted
930 arguments to their argument types in non-prototyped
931 functions. */
932 if (startswith (name, "__truncdfsf2"))
933 return 1;
934 if (startswith (name, "__aeabi_d2f"))
935 return 1;
936
937 /* Internal functions related to thread-local storage. */
938 if (startswith (name, "__tls_get_addr"))
939 return 1;
940 if (startswith (name, "__aeabi_read_tp"))
941 return 1;
942 }
943 else
944 {
945 /* If we run against a stripped glibc, we may be unable to identify
946 special functions by name. Check for one important case,
947 __aeabi_read_tp, by comparing the *code* against the default
948 implementation (this is hand-written ARM assembler in glibc). */
949
950 if (!is_thumb
951 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
952 == 0xe3e00a0f /* mov r0, #0xffff0fff */
953 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
954 == 0xe240f01f) /* sub pc, r0, #31 */
955 return 1;
956 }
957
958 return 0;
959 }
960
961 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1
962 is the first 16 bits of the instruction, and INSN2 is the second 16
963 bits. */
964 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
965 ((bits ((insn1), 0, 3) << 12) \
966 | (bits ((insn1), 10, 10) << 11) \
967 | (bits ((insn2), 12, 14) << 8) \
968 | bits ((insn2), 0, 7))
969
970 /* Extract the immediate from a movw/movt instruction of encoding A. INSN
971 is the 32-bit instruction. */
972 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
973 ((bits ((insn), 16, 19) << 12) \
974 | bits ((insn), 0, 11))
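/* Worked examples (assuming the usual movw encodings): in Thumb-2, the
   instruction "movw r1, #7" is encoded as INSN1 = 0xf240, INSN2 = 0x0107,
   so imm4 = 0x0, i = 0, imm3 = 0x0 and imm8 = 0x07, and
   EXTRACT_MOVW_MOVT_IMM_T (0xf240, 0x0107) evaluates to 0x0007. In ARM
   state the same "movw r1, #7" is 0xe3001007, and
   EXTRACT_MOVW_MOVT_IMM_A (0xe3001007) also evaluates to 0x0007. */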
975
976 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
977
978 static unsigned int
979 thumb_expand_immediate (unsigned int imm)
980 {
981 unsigned int count = imm >> 7;
982
983 if (count < 8)
984 switch (count / 2)
985 {
986 case 0:
987 return imm & 0xff;
988 case 1:
989 return (imm & 0xff) | ((imm & 0xff) << 16);
990 case 2:
991 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
992 case 3:
993 return (imm & 0xff) | ((imm & 0xff) << 8)
994 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
995 }
996
997 return (0x80 | (imm & 0x7f)) << (32 - count);
998 }
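/* For illustration, following the cases above:
   thumb_expand_immediate (0x155) == 0x00550055 (0x55 replicated in bytes
   0 and 2), and thumb_expand_immediate (0x4ff) == 0x7f800000 (0xff
   rotated right by 9 bits). */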
999
1000 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
1001 epilogue, 0 otherwise. */
1002
1003 static int
1004 thumb_instruction_restores_sp (unsigned short insn)
1005 {
1006 return (insn == 0x46bd /* mov sp, r7 */
1007 || (insn & 0xff80) == 0xb000 /* add sp, imm */
1008 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
1009 }
1010
1011 /* Analyze a Thumb prologue, looking for a recognizable stack frame
1012 and frame pointer. Scan until we encounter a store that could
1013 clobber the stack frame unexpectedly, or an unknown instruction.
1014 Return the last address which is definitely safe to skip for an
1015 initial breakpoint. */
1016
1017 static CORE_ADDR
1018 thumb_analyze_prologue (struct gdbarch *gdbarch,
1019 CORE_ADDR start, CORE_ADDR limit,
1020 struct arm_prologue_cache *cache)
1021 {
1022 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1023 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1024 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1025 int i;
1026 pv_t regs[16];
1027 CORE_ADDR offset;
1028 CORE_ADDR unrecognized_pc = 0;
1029
1030 for (i = 0; i < 16; i++)
1031 regs[i] = pv_register (i, 0);
1032 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1033
1034 while (start < limit)
1035 {
1036 unsigned short insn;
1037 gdb::optional<bool> ra_signed_state;
1038
1039 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
1040
1041 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
1042 {
1043 int regno;
1044 int mask;
1045
1046 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1047 break;
1048
1049 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
1050 whether to save LR (R14). */
1051 mask = (insn & 0xff) | ((insn & 0x100) << 6);
1052
1053 /* Calculate offsets of saved R0-R7 and LR. */
1054 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1055 if (mask & (1 << regno))
1056 {
1057 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1058 -4);
1059 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1060 }
1061 }
1062 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
1063 {
1064 offset = (insn & 0x7f) << 2; /* get scaled offset */
1065 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1066 -offset);
1067 }
1068 else if (thumb_instruction_restores_sp (insn))
1069 {
1070 /* Don't scan past the epilogue. */
1071 break;
1072 }
1073 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
1074 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
1075 (insn & 0xff) << 2);
1076 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
1077 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1078 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
1079 bits (insn, 6, 8));
1080 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1081 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1082 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1083 bits (insn, 0, 7));
1084 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1085 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1086 && pv_is_constant (regs[bits (insn, 3, 5)]))
1087 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1088 regs[bits (insn, 6, 8)]);
1089 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1090 && pv_is_constant (regs[bits (insn, 3, 6)]))
1091 {
1092 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1093 int rm = bits (insn, 3, 6);
1094 regs[rd] = pv_add (regs[rd], regs[rm]);
1095 }
1096 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1097 {
1098 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1099 int src_reg = (insn & 0x78) >> 3;
1100 regs[dst_reg] = regs[src_reg];
1101 }
1102 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1103 {
1104 /* Handle stores to the stack. Normally pushes are used,
1105 but with GCC -mtpcs-frame, there may be other stores
1106 in the prologue to create the frame. */
1107 int regno = (insn >> 8) & 0x7;
1108 pv_t addr;
1109
1110 offset = (insn & 0xff) << 2;
1111 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1112
1113 if (stack.store_would_trash (addr))
1114 break;
1115
1116 stack.store (addr, 4, regs[regno]);
1117 }
1118 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1119 {
1120 int rd = bits (insn, 0, 2);
1121 int rn = bits (insn, 3, 5);
1122 pv_t addr;
1123
1124 offset = bits (insn, 6, 10) << 2;
1125 addr = pv_add_constant (regs[rn], offset);
1126
1127 if (stack.store_would_trash (addr))
1128 break;
1129
1130 stack.store (addr, 4, regs[rd]);
1131 }
1132 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1133 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1134 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1135 /* Ignore stores of argument registers to the stack. */
1136 ;
1137 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1138 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1139 /* Ignore block loads from the stack, potentially copying
1140 parameters from memory. */
1141 ;
1142 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1143 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1144 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1145 /* Similarly ignore single loads from the stack. */
1146 ;
1147 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1148 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1149 /* Skip register copies, i.e. saves to another register
1150 instead of the stack. */
1151 ;
1152 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1153 /* Recognize constant loads; even with small stacks these are necessary
1154 on Thumb. */
1155 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1156 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1157 {
1158 /* Constant pool loads, for the same reason. */
1159 unsigned int constant;
1160 CORE_ADDR loc;
1161
1162 loc = start + 4 + bits (insn, 0, 7) * 4;
1163 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1164 regs[bits (insn, 8, 10)] = pv_constant (constant);
1165 }
1166 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1167 {
1168 unsigned short inst2;
1169
1170 inst2 = read_code_unsigned_integer (start + 2, 2,
1171 byte_order_for_code);
1172 uint32_t whole_insn = (insn << 16) | inst2;
1173
1174 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1175 {
1176 /* BL, BLX. Allow some special function calls when
1177 skipping the prologue; GCC generates these before
1178 storing arguments to the stack. */
1179 CORE_ADDR nextpc;
1180 int j1, j2, imm1, imm2;
1181
1182 imm1 = sbits (insn, 0, 10);
1183 imm2 = bits (inst2, 0, 10);
1184 j1 = bit (inst2, 13);
1185 j2 = bit (inst2, 11);
1186
1187 offset = ((imm1 << 12) + (imm2 << 1));
1188 offset ^= ((!j2) << 22) | ((!j1) << 23);
1189
1190 nextpc = start + 4 + offset;
1191 /* For BLX make sure to clear the low bits. */
1192 if (bit (inst2, 12) == 0)
1193 nextpc = nextpc & 0xfffffffc;
1194
1195 if (!skip_prologue_function (gdbarch, nextpc,
1196 bit (inst2, 12) != 0))
1197 break;
1198 }
1199
1200 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1201 { registers } */
1202 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1203 {
1204 pv_t addr = regs[bits (insn, 0, 3)];
1205 int regno;
1206
1207 if (stack.store_would_trash (addr))
1208 break;
1209
1210 /* Calculate offsets of saved registers. */
1211 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1212 if (inst2 & (1 << regno))
1213 {
1214 addr = pv_add_constant (addr, -4);
1215 stack.store (addr, 4, regs[regno]);
1216 }
1217
1218 if (insn & 0x0020)
1219 regs[bits (insn, 0, 3)] = addr;
1220 }
1221
1222 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1223 else if ((insn & 0xff20) == 0xed20
1224 && (inst2 & 0x0f00) == 0x0b00
1225 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1226 {
1227 /* Address SP points to. */
1228 pv_t addr = regs[bits (insn, 0, 3)];
1229
1230 /* Number of registers saved. */
1231 unsigned int number = bits (inst2, 0, 7) >> 1;
1232
1233 /* First register to save. */
1234 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1235
1236 if (stack.store_would_trash (addr))
1237 break;
1238
1239 /* Calculate offsets of saved registers. */
1240 for (; number > 0; number--)
1241 {
1242 addr = pv_add_constant (addr, -8);
1243 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1244 + vd + number, 0));
1245 }
1246
1247 /* Writeback SP to account for the saved registers. */
1248 regs[bits (insn, 0, 3)] = addr;
1249 }
1250
1251 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1252 [Rn, #+/-imm]{!} */
1253 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1254 {
1255 int regno1 = bits (inst2, 12, 15);
1256 int regno2 = bits (inst2, 8, 11);
1257 pv_t addr = regs[bits (insn, 0, 3)];
1258
1259 offset = inst2 & 0xff;
1260 if (insn & 0x0080)
1261 addr = pv_add_constant (addr, offset);
1262 else
1263 addr = pv_add_constant (addr, -offset);
1264
1265 if (stack.store_would_trash (addr))
1266 break;
1267
1268 stack.store (addr, 4, regs[regno1]);
1269 stack.store (pv_add_constant (addr, 4),
1270 4, regs[regno2]);
1271
1272 if (insn & 0x0020)
1273 regs[bits (insn, 0, 3)] = addr;
1274 }
1275
1276 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1277 && (inst2 & 0x0c00) == 0x0c00
1278 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1279 {
1280 int regno = bits (inst2, 12, 15);
1281 pv_t addr = regs[bits (insn, 0, 3)];
1282
1283 offset = inst2 & 0xff;
1284 if (inst2 & 0x0200)
1285 addr = pv_add_constant (addr, offset);
1286 else
1287 addr = pv_add_constant (addr, -offset);
1288
1289 if (stack.store_would_trash (addr))
1290 break;
1291
1292 stack.store (addr, 4, regs[regno]);
1293
1294 if (inst2 & 0x0100)
1295 regs[bits (insn, 0, 3)] = addr;
1296 }
1297
1298 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1299 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1300 {
1301 int regno = bits (inst2, 12, 15);
1302 pv_t addr;
1303
1304 offset = inst2 & 0xfff;
1305 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1306
1307 if (stack.store_would_trash (addr))
1308 break;
1309
1310 stack.store (addr, 4, regs[regno]);
1311 }
1312
1313 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1314 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1315 /* Ignore stores of argument registers to the stack. */
1316 ;
1317
1318 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1319 && (inst2 & 0x0d00) == 0x0c00
1320 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1321 /* Ignore stores of argument registers to the stack. */
1322 ;
1323
1324 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1325 { registers } */
1326 && (inst2 & 0x8000) == 0x0000
1327 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1328 /* Ignore block loads from the stack, potentially copying
1329 parameters from memory. */
1330 ;
1331
1332 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1333 [Rn, #+/-imm] */
1334 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1335 /* Similarly ignore dual loads from the stack. */
1336 ;
1337
1338 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1339 && (inst2 & 0x0d00) == 0x0c00
1340 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1341 /* Similarly ignore single loads from the stack. */
1342 ;
1343
1344 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1345 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1346 /* Similarly ignore single loads from the stack. */
1347 ;
1348
1349 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1350 && (inst2 & 0x8000) == 0x0000)
1351 {
1352 unsigned int imm = ((bits (insn, 10, 10) << 11)
1353 | (bits (inst2, 12, 14) << 8)
1354 | bits (inst2, 0, 7));
1355
1356 regs[bits (inst2, 8, 11)]
1357 = pv_add_constant (regs[bits (insn, 0, 3)],
1358 thumb_expand_immediate (imm));
1359 }
1360
1361 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1362 && (inst2 & 0x8000) == 0x0000)
1363 {
1364 unsigned int imm = ((bits (insn, 10, 10) << 11)
1365 | (bits (inst2, 12, 14) << 8)
1366 | bits (inst2, 0, 7));
1367
1368 regs[bits (inst2, 8, 11)]
1369 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1370 }
1371
1372 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1373 && (inst2 & 0x8000) == 0x0000)
1374 {
1375 unsigned int imm = ((bits (insn, 10, 10) << 11)
1376 | (bits (inst2, 12, 14) << 8)
1377 | bits (inst2, 0, 7));
1378
1379 regs[bits (inst2, 8, 11)]
1380 = pv_add_constant (regs[bits (insn, 0, 3)],
1381 - (CORE_ADDR) thumb_expand_immediate (imm));
1382 }
1383
1384 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1385 && (inst2 & 0x8000) == 0x0000)
1386 {
1387 unsigned int imm = ((bits (insn, 10, 10) << 11)
1388 | (bits (inst2, 12, 14) << 8)
1389 | bits (inst2, 0, 7));
1390
1391 regs[bits (inst2, 8, 11)]
1392 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1393 }
1394
1395 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1396 {
1397 unsigned int imm = ((bits (insn, 10, 10) << 11)
1398 | (bits (inst2, 12, 14) << 8)
1399 | bits (inst2, 0, 7));
1400
1401 regs[bits (inst2, 8, 11)]
1402 = pv_constant (thumb_expand_immediate (imm));
1403 }
1404
1405 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1406 {
1407 unsigned int imm
1408 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1409
1410 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1411 }
1412
1413 else if (insn == 0xea5f /* mov.w Rd,Rm */
1414 && (inst2 & 0xf0f0) == 0)
1415 {
1416 int dst_reg = (inst2 & 0x0f00) >> 8;
1417 int src_reg = inst2 & 0xf;
1418 regs[dst_reg] = regs[src_reg];
1419 }
1420
1421 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1422 {
1423 /* Constant pool loads. */
1424 unsigned int constant;
1425 CORE_ADDR loc;
1426
1427 offset = bits (inst2, 0, 11);
1428 if (insn & 0x0080)
1429 loc = start + 4 + offset;
1430 else
1431 loc = start + 4 - offset;
1432
1433 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1434 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1435 }
1436
1437 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1438 {
1439 /* Constant pool loads. */
1440 unsigned int constant;
1441 CORE_ADDR loc;
1442
1443 offset = bits (inst2, 0, 7) << 2;
1444 if (insn & 0x0080)
1445 loc = start + 4 + offset;
1446 else
1447 loc = start + 4 - offset;
1448
1449 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1450 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1451
1452 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1453 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1454 }
1455 /* Start of ARMv8.1-m PACBTI extension instructions. */
1456 else if (IS_PAC (whole_insn))
1457 {
1458 /* LR and SP are input registers. PAC is in R12. LR is
1459 signed from this point onwards. NOP space. */
1460 ra_signed_state = true;
1461 }
1462 else if (IS_PACBTI (whole_insn))
1463 {
1464 /* LR and SP are input registers. PAC is in R12 and PC is a
1465 valid BTI landing pad. LR is signed from this point onwards.
1466 NOP space. */
1467 ra_signed_state = true;
1468 }
1469 else if (IS_BTI (whole_insn))
1470 {
1471 /* Valid BTI landing pad. NOP space. */
1472 }
1473 else if (IS_PACG (whole_insn))
1474 {
1475 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1476 this point onwards. */
1477 ra_signed_state = true;
1478 }
1479 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1480 {
1481 /* These instructions appear close to the epilogue, when signed
1482 pointers are getting authenticated. */
1483 ra_signed_state = false;
1484 }
1485 /* End of ARMv8.1-m PACBTI extension instructions. */
1486 else if (thumb2_instruction_changes_pc (insn, inst2))
1487 {
1488 /* Don't scan past anything that might change control flow. */
1489 break;
1490 }
1491 else
1492 {
1493 /* The optimizer might shove anything into the prologue,
1494 so we just skip what we don't recognize. */
1495 unrecognized_pc = start;
1496 }
1497
1498 /* Make sure we are dealing with a target that supports ARMv8.1-m
1499 PACBTI. */
1500 if (cache != nullptr && tdep->have_pacbti
1501 && ra_signed_state.has_value ())
1502 {
1503 arm_debug_printf ("Found pacbti instruction at %s",
1504 paddress (gdbarch, start));
1505 arm_debug_printf ("RA is %s",
1506 *ra_signed_state ? "signed" : "not signed");
1507 cache->ra_signed_state = ra_signed_state;
1508 }
1509
1510 start += 2;
1511 }
1512 else if (thumb_instruction_changes_pc (insn))
1513 {
1514 /* Don't scan past anything that might change control flow. */
1515 break;
1516 }
1517 else
1518 {
1519 /* The optimizer might shove anything into the prologue,
1520 so we just skip what we don't recognize. */
1521 unrecognized_pc = start;
1522 }
1523
1524 start += 2;
1525 }
1526
1527 arm_debug_printf ("Prologue scan stopped at %s",
1528 paddress (gdbarch, start));
1529
1530 if (unrecognized_pc == 0)
1531 unrecognized_pc = start;
1532
1533 if (cache == NULL)
1534 return unrecognized_pc;
1535
1536 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1537 {
1538 /* Frame pointer is fp. Frame size is constant. */
1539 cache->framereg = ARM_FP_REGNUM;
1540 cache->framesize = -regs[ARM_FP_REGNUM].k;
1541 }
1542 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1543 {
1544 /* Frame pointer is r7. Frame size is constant. */
1545 cache->framereg = THUMB_FP_REGNUM;
1546 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1547 }
1548 else
1549 {
1550 /* Try the stack pointer... this is a bit desperate. */
1551 cache->framereg = ARM_SP_REGNUM;
1552 cache->framesize = -regs[ARM_SP_REGNUM].k;
1553 }
1554
1555 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1556 if (stack.find_reg (gdbarch, i, &offset))
1557 {
1558 cache->saved_regs[i].set_addr (offset);
1559 if (i == ARM_SP_REGNUM)
1560 arm_cache_set_active_sp_value (cache, tdep, offset);
1561 }
1562
1563 return unrecognized_pc;
1564 }
1565
1566
1567 /* Try to analyze the instructions starting from PC, which load symbol
1568 __stack_chk_guard. Return the address of the instruction after loading
1569 this symbol, set the destination register number in *DESTREG, and set
1570 the size of the loading instructions in *OFFSET. Return 0 if the
1571 instructions are not recognized. */
1572
1573 static CORE_ADDR
1574 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1575 unsigned int *destreg, int *offset)
1576 {
1577 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1578 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1579 unsigned int low, high, address;
1580
1581 address = 0;
1582 if (is_thumb)
1583 {
1584 unsigned short insn1
1585 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1586
1587 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1588 {
1589 *destreg = bits (insn1, 8, 10);
1590 *offset = 2;
1591 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1592 address = read_memory_unsigned_integer (address, 4,
1593 byte_order_for_code);
1594 }
1595 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1596 {
1597 unsigned short insn2
1598 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1599
1600 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1601
1602 insn1
1603 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1604 insn2
1605 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1606
1607 /* movt Rd, #const */
1608 if ((insn1 & 0xfbc0) == 0xf2c0)
1609 {
1610 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1611 *destreg = bits (insn2, 8, 11);
1612 *offset = 8;
1613 address = (high << 16 | low);
1614 }
1615 }
1616 }
1617 else
1618 {
1619 unsigned int insn
1620 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1621
1622 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1623 {
1624 address = bits (insn, 0, 11) + pc + 8;
1625 address = read_memory_unsigned_integer (address, 4,
1626 byte_order_for_code);
1627
1628 *destreg = bits (insn, 12, 15);
1629 *offset = 4;
1630 }
1631 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1632 {
1633 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1634
1635 insn
1636 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1637
1638 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1639 {
1640 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1641 *destreg = bits (insn, 12, 15);
1642 *offset = 8;
1643 address = (high << 16 | low);
1644 }
1645 }
1646 }
1647
1648 return address;
1649 }
1650
1651 /* Try to skip a sequence of instructions used for the stack protector. If
1652 PC points to the first instruction of this sequence, return the address
1653 of the first instruction after this sequence; otherwise, return the original PC.
1654
1655 On ARM, this sequence of instructions is mainly composed of three steps:
1656 Step 1: load symbol __stack_chk_guard,
1657 Step 2: load from address of __stack_chk_guard,
1658 Step 3: store it to somewhere else.
1659
1660 Usually, the instructions in steps 2 and 3 are the same across ARM
1661 architectures: step 2 is a single instruction 'ldr Rx, [Rn, #0]', and
1662 step 3 is also a single instruction 'str Rx, [r7, #immd]'. However,
1663 the instructions in step 1 vary between ARM architectures. On ARMv7,
1664 they are,
1665
1666 movw Rn, #:lower16:__stack_chk_guard
1667 movt Rn, #:upper16:__stack_chk_guard
1668
1669 On ARMv5t, it is,
1670
1671 ldr Rn, .Label
1672 ....
1673 .Label:
1674 .word __stack_chk_guard
1675
1676 Since ldr/str are very common instructions, we can't use them alone as the
1677 'fingerprint' or 'signature' of a stack protector sequence. Here we use
1678 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1679 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
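/* Put together, an illustrative ARMv7 sequence (register and offset
   choices are hypothetical) matching the description above would be:

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3, #0]
	str	r3, [r7, #12] */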
1680
1681 static CORE_ADDR
1682 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1683 {
1684 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1685 unsigned int basereg;
1686 struct bound_minimal_symbol stack_chk_guard;
1687 int offset;
1688 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1689 CORE_ADDR addr;
1690
1691 /* Try to parse the instructions in Step 1. */
1692 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1693 &basereg, &offset);
1694 if (!addr)
1695 return pc;
1696
1697 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1698 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1699 Otherwise, this sequence cannot be for the stack protector. */
1700 if (stack_chk_guard.minsym == NULL
1701 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1702 return pc;
1703
1704 if (is_thumb)
1705 {
1706 unsigned int destreg;
1707 unsigned short insn
1708 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1709
1710 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1711 if ((insn & 0xf800) != 0x6800)
1712 return pc;
1713 if (bits (insn, 3, 5) != basereg)
1714 return pc;
1715 destreg = bits (insn, 0, 2);
1716
1717 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1718 byte_order_for_code);
1719 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1720 if ((insn & 0xf800) != 0x6000)
1721 return pc;
1722 if (destreg != bits (insn, 0, 2))
1723 return pc;
1724 }
1725 else
1726 {
1727 unsigned int destreg;
1728 unsigned int insn
1729 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1730
1731 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1732 if ((insn & 0x0e500000) != 0x04100000)
1733 return pc;
1734 if (bits (insn, 16, 19) != basereg)
1735 return pc;
1736 destreg = bits (insn, 12, 15);
1737 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1738 insn = read_code_unsigned_integer (pc + offset + 4,
1739 4, byte_order_for_code);
1740 if ((insn & 0x0e500000) != 0x04000000)
1741 return pc;
1742 if (bits (insn, 12, 15) != destreg)
1743 return pc;
1744 }
1745 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1746 and 8 bytes on ARM. */
1747 if (is_thumb)
1748 return pc + offset + 4;
1749 else
1750 return pc + offset + 8;
1751 }
1752
1753 /* Advance the PC across any function entry prologue instructions to
1754 reach some "real" code.
1755
1756 The APCS (ARM Procedure Call Standard) defines the following
1757 prologue:
1758
1759 mov ip, sp
1760 [stmfd sp!, {a1,a2,a3,a4}]
1761 stmfd sp!, {...,fp,ip,lr,pc}
1762 [stfe f7, [sp, #-12]!]
1763 [stfe f6, [sp, #-12]!]
1764 [stfe f5, [sp, #-12]!]
1765 [stfe f4, [sp, #-12]!]
1766 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1767
1768 static CORE_ADDR
1769 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1770 {
1771 CORE_ADDR func_addr, limit_pc;
1772
1773 /* See if we can determine the end of the prologue via the symbol table.
1774 If so, then return either PC, or the PC after the prologue, whichever
1775 is greater. */
1776 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1777 {
1778 CORE_ADDR post_prologue_pc
1779 = skip_prologue_using_sal (gdbarch, func_addr);
1780 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1781
1782 if (post_prologue_pc)
1783 post_prologue_pc
1784 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1785
1786
1787 /* GCC always emits a line note before the prologue and another
1788 one after, even if the two are at the same address or on the
1789 same line. Take advantage of this so that we do not need to
1790 know every instruction that might appear in the prologue. We
1791 will have producer information for most binaries; if it is
1792 missing (e.g. for -gstabs), assume the GNU tools. */
1793 if (post_prologue_pc
1794 && (cust == NULL
1795 || cust->producer () == NULL
1796 || startswith (cust->producer (), "GNU ")
1797 || producer_is_llvm (cust->producer ())))
1798 return post_prologue_pc;
1799
1800 if (post_prologue_pc != 0)
1801 {
1802 CORE_ADDR analyzed_limit;
1803
1804 /* For non-GCC compilers, make sure the entire line is an
1805 acceptable prologue; GDB will round this function's
1806 return value up to the end of the following line so we
1807 can not skip just part of a line (and we do not want to).
1808
1809 RealView does not treat the prologue specially, but does
1810 associate prologue code with the opening brace; so this
1811 lets us skip the first line if we think it is the opening
1812 brace. */
1813 if (arm_pc_is_thumb (gdbarch, func_addr))
1814 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1815 post_prologue_pc, NULL);
1816 else
1817 analyzed_limit
1818 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1819 NULL, target_arm_instruction_reader ());
1820
1821 if (analyzed_limit != post_prologue_pc)
1822 return func_addr;
1823
1824 return post_prologue_pc;
1825 }
1826 }
1827
1828 /* Can't determine prologue from the symbol table, need to examine
1829 instructions. */
1830
1831 /* Find an upper limit on the function prologue using the debug
1832 information. If the debug information could not be used to provide
1833 that bound, then use an arbitrary large number as the upper bound. */
1834 /* Like arm_scan_prologue, stop no later than pc + 64. */
1835 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1836 if (limit_pc == 0)
1837 limit_pc = pc + 64; /* Magic. */
1838
1839
1840 /* Check if this is Thumb code. */
1841 if (arm_pc_is_thumb (gdbarch, pc))
1842 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1843 else
1844 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1845 target_arm_instruction_reader ());
1846 }
1847
1848 /* *INDENT-OFF* */
1849 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1850 This function decodes a Thumb function prologue to determine:
1851 1) the size of the stack frame
1852 2) which registers are saved on it
1853 3) the offsets of saved regs
1854 4) the offset from the stack pointer to the frame pointer
1855
1856 A typical Thumb function prologue would create this stack frame
1857 (offsets relative to FP)
1858 old SP -> 24 stack parameters
1859 20 LR
1860 16 R7
1861 R7 -> 0 local variables (16 bytes)
1862 SP -> -12 additional stack space (12 bytes)
1863 The frame size would thus be 36 bytes, and the frame offset would be
1864 12 bytes. The frame register is R7.
1865
1866 The comments for thumb_skip_prolog() describe the algorithm we use
1867 to detect the end of the prologue. */
1868 /* *INDENT-ON* */
1869
1870 static void
1871 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1872 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1873 {
1874 CORE_ADDR prologue_start;
1875 CORE_ADDR prologue_end;
1876
1877 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1878 &prologue_end))
1879 {
1880 /* See comment in arm_scan_prologue for an explanation of
1881 this heuristic. */
1882 if (prologue_end > prologue_start + 64)
1883 {
1884 prologue_end = prologue_start + 64;
1885 }
1886 }
1887 else
1888 /* We're in the boondocks: we have no idea where the start of the
1889 function is. */
1890 return;
1891
1892 prologue_end = std::min (prologue_end, prev_pc);
1893
1894 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1895 }
1896
1897 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1898 otherwise. */
1899
1900 static int
1901 arm_instruction_restores_sp (unsigned int insn)
1902 {
1903 if (bits (insn, 28, 31) != INST_NV)
1904 {
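/* Only conditionally-executed encodings are considered here; a
   condition field of 0b1111 selects the unconditional instruction
   space, which uses different encodings. */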
1905 if ((insn & 0x0df0f000) == 0x0080d000
1906 /* ADD SP (register or immediate). */
1907 || (insn & 0x0df0f000) == 0x0040d000
1908 /* SUB SP (register or immediate). */
1909 || (insn & 0x0ffffff0) == 0x01a0d000
1910 /* MOV SP. */
1911 || (insn & 0x0fff0000) == 0x08bd0000
1912 /* POP (LDMIA). */
1913 || (insn & 0x0fff0000) == 0x049d0000)
1914 /* POP of a single register. */
1915 return 1;
1916 }
1917
1918 return 0;
1919 }
1920
1921 /* Implement immediate value decoding, as described in section A5.2.4
1922 (Modified immediate constants in ARM instructions) of the ARM Architecture
1923 Reference Manual (ARMv7-A and ARMv7-R edition). */
1924
1925 static uint32_t
1926 arm_expand_immediate (uint32_t imm)
1927 {
1928 /* Immediate values are 12 bits long. */
1929 gdb_assert ((imm & 0xfffff000) == 0);
1930
1931 uint32_t unrotated_value = imm & 0xff;
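/* The 4-bit rotation field encodes a rotate-right by twice its value,
   hence the shift by 7 rather than 8.  For example, 0x4ff expands to
   0xff rotated right by 8, i.e. 0xff000000. */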
1932 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1933
1934 if (rotate_amount == 0)
1935 return unrotated_value;
1936
1937 return ((unrotated_value >> rotate_amount)
1938 | (unrotated_value << (32 - rotate_amount)));
1939 }
1940
1941 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1942 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1943 fill it in. Return the first address not recognized as a prologue
1944 instruction.
1945
1946 We recognize all the instructions typically found in ARM prologues,
1947 plus harmless instructions which can be skipped (either for analysis
1948 purposes, or a more restrictive set that can be skipped when finding
1949 the end of the prologue). */
1950
1951 static CORE_ADDR
1952 arm_analyze_prologue (struct gdbarch *gdbarch,
1953 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1954 struct arm_prologue_cache *cache,
1955 const arm_instruction_reader &insn_reader)
1956 {
1957 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1958 int regno;
1959 CORE_ADDR offset, current_pc;
1960 pv_t regs[ARM_FPS_REGNUM];
1961 CORE_ADDR unrecognized_pc = 0;
1962 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1963
1964 /* Search the prologue looking for instructions that set up the
1965 frame pointer, adjust the stack pointer, and save registers.
1966
1967 Be careful, however, and if it doesn't look like a prologue,
1968 don't try to scan it. If, for instance, a frameless function
1969 begins with stmfd sp!, then we will tell ourselves there is
1970 a frame, which will confuse stack traceback, as well as "finish"
1971 and other operations that rely on a knowledge of the stack
1972 traceback. */
1973
1974 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1975 regs[regno] = pv_register (regno, 0);
1976 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1977
1978 for (current_pc = prologue_start;
1979 current_pc < prologue_end;
1980 current_pc += 4)
1981 {
1982 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1983
1984 if (insn == 0xe1a0c00d) /* mov ip, sp */
1985 {
1986 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1987 continue;
1988 }
1989 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1990 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1991 {
1992 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1993 int rd = bits (insn, 12, 15);
1994 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1995 continue;
1996 }
1997 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1998 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1999 {
2000 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2001 int rd = bits (insn, 12, 15);
2002 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
2003 continue;
2004 }
2005 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
2006 [sp, #-4]! */
2007 {
2008 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2009 break;
2010 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2011 stack.store (regs[ARM_SP_REGNUM], 4,
2012 regs[bits (insn, 12, 15)]);
2013 continue;
2014 }
2015 else if ((insn & 0xffff0000) == 0xe92d0000)
2016 /* stmfd sp!, {..., fp, ip, lr, pc}
2017 or
2018 stmfd sp!, {a1, a2, a3, a4} */
2019 {
2020 int mask = insn & 0xffff;
2021
2022 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2023 break;
2024
2025 /* Calculate offsets of saved registers. */
2026 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
2027 if (mask & (1 << regno))
2028 {
2029 regs[ARM_SP_REGNUM]
2030 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2031 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
2032 }
2033 }
2034 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
2035 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
2036 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
2037 {
2038 /* No need to add this to saved_regs -- it's just an arg reg. */
2039 continue;
2040 }
2041 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
2042 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
2043 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
2044 {
2045 /* No need to add this to saved_regs -- it's just an arg reg. */
2046 continue;
2047 }
2048 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
2049 { registers } */
2050 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2051 {
2052 /* No need to add this to saved_regs -- it's just arg regs. */
2053 continue;
2054 }
2055 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
2056 {
2057 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2058 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
2059 }
2060 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
2061 {
2062 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2063 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
2064 }
2065 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
2066 [sp, #-c]! */
2067 && tdep->have_fpa_registers)
2068 {
2069 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2070 break;
2071
2072 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2073 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
2074 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
2075 }
2076 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
2077 [sp!] */
2078 && tdep->have_fpa_registers)
2079 {
2080 int n_saved_fp_regs;
2081 unsigned int fp_start_reg, fp_bound_reg;
2082
2083 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2084 break;
2085
2086 if ((insn & 0x800) == 0x800) /* N0 is set */
2087 {
2088 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2089 n_saved_fp_regs = 3;
2090 else
2091 n_saved_fp_regs = 1;
2092 }
2093 else
2094 {
2095 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2096 n_saved_fp_regs = 2;
2097 else
2098 n_saved_fp_regs = 4;
2099 }
2100
2101 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2102 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2103 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2104 {
2105 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2106 stack.store (regs[ARM_SP_REGNUM], 12,
2107 regs[fp_start_reg]);
2108 }
2109 }
2110 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2111 {
2112 /* Allow some special function calls when skipping the
2113 prologue; GCC generates these before storing arguments to
2114 the stack. */
2115 CORE_ADDR dest = BranchDest (current_pc, insn);
2116
2117 if (skip_prologue_function (gdbarch, dest, 0))
2118 continue;
2119 else
2120 break;
2121 }
2122 else if ((insn & 0xf0000000) != 0xe0000000)
2123 break; /* Condition not true, exit early. */
2124 else if (arm_instruction_changes_pc (insn))
2125 /* Don't scan past anything that might change control flow. */
2126 break;
2127 else if (arm_instruction_restores_sp (insn))
2128 {
2129 /* Don't scan past the epilogue. */
2130 break;
2131 }
2132 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2133 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2134 /* Ignore block loads from the stack, potentially copying
2135 parameters from memory. */
2136 continue;
2137 else if ((insn & 0xfc500000) == 0xe4100000
2138 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2139 /* Similarly ignore single loads from the stack. */
2140 continue;
2141 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2142 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2143 register instead of the stack. */
2144 continue;
2145 else
2146 {
2147 /* The optimizer might shove anything into the prologue, if
2148 we build up cache (cache != NULL) from scanning prologue,
2149 we just skip what we don't recognize and scan further to
2150 make cache as complete as possible. However, if we skip
2151 prologue, we'll stop immediately on unrecognized
2152 instruction. */
2153 unrecognized_pc = current_pc;
2154 if (cache != NULL)
2155 continue;
2156 else
2157 break;
2158 }
2159 }
2160
2161 if (unrecognized_pc == 0)
2162 unrecognized_pc = current_pc;
2163
2164 if (cache)
2165 {
2166 int framereg, framesize;
2167
2168 /* The frame size is just the distance from the frame register
2169 to the original stack pointer. */
2170 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2171 {
2172 /* Frame pointer is fp. */
2173 framereg = ARM_FP_REGNUM;
2174 framesize = -regs[ARM_FP_REGNUM].k;
2175 }
2176 else
2177 {
2178 /* Try the stack pointer... this is a bit desperate. */
2179 framereg = ARM_SP_REGNUM;
2180 framesize = -regs[ARM_SP_REGNUM].k;
2181 }
2182
2183 cache->framereg = framereg;
2184 cache->framesize = framesize;
2185
2186 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2187 if (stack.find_reg (gdbarch, regno, &offset))
2188 {
2189 cache->saved_regs[regno].set_addr (offset);
2190 if (regno == ARM_SP_REGNUM)
2191 arm_cache_set_active_sp_value (cache, tdep, offset);
2192 }
2193 }
2194
2195 arm_debug_printf ("Prologue scan stopped at %s",
2196 paddress (gdbarch, unrecognized_pc));
2197
2198 return unrecognized_pc;
2199 }
2200
2201 static void
2202 arm_scan_prologue (frame_info_ptr this_frame,
2203 struct arm_prologue_cache *cache)
2204 {
2205 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2206 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2207 CORE_ADDR prologue_start, prologue_end;
2208 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2209 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2210 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2211
2212 /* Assume there is no frame until proven otherwise. */
2213 cache->framereg = ARM_SP_REGNUM;
2214 cache->framesize = 0;
2215
2216 /* Check for Thumb prologue. */
2217 if (arm_frame_is_thumb (this_frame))
2218 {
2219 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2220 return;
2221 }
2222
2223 /* Find the function prologue. If we can't find the function in
2224 the symbol table, peek in the stack frame to find the PC. */
2225 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2226 &prologue_end))
2227 {
2228 /* One way to find the end of the prologue (which works well
2229 for unoptimized code) is to do the following:
2230
2231 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2232
2233 if (sal.line == 0)
2234 prologue_end = prev_pc;
2235 else if (sal.end < prologue_end)
2236 prologue_end = sal.end;
2237
2238 This mechanism is very accurate so long as the optimizer
2239 doesn't move any instructions from the function body into the
2240 prologue. If this happens, sal.end will be the last
2241 instruction in the first hunk of prologue code just before
2242 the first instruction that the scheduler has moved from
2243 the body to the prologue.
2244
2245 In order to make sure that we scan all of the prologue
2246 instructions, we use a slightly less accurate mechanism which
2247 may scan more than necessary. To help compensate for this
2248 lack of accuracy, the prologue scanning loop below contains
2249 several clauses which'll cause the loop to terminate early if
2250 an implausible prologue instruction is encountered.
2251
2252 The expression
2253
2254 prologue_start + 64
2255
2256 is a suitable endpoint since it accounts for the largest
2257 possible prologue plus up to five instructions inserted by
2258 the scheduler. */
2259
2260 if (prologue_end > prologue_start + 64)
2261 {
2262 prologue_end = prologue_start + 64; /* See above. */
2263 }
2264 }
2265 else
2266 {
2267 /* We have no symbol information. Our only option is to assume this
2268 function has a standard stack frame and the normal frame register.
2269 Then, we can find the value of our frame pointer on entrance to
2270 the callee (or at the present moment if this is the innermost frame).
2271 The value stored there should be the address of the stmfd + 8. */
2272 CORE_ADDR frame_loc;
2273 ULONGEST return_value;
2274
2275 /* AAPCS does not use a frame register, so we can abort here. */
2276 if (tdep->arm_abi == ARM_ABI_AAPCS)
2277 return;
2278
2279 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2280 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2281 &return_value))
2282 return;
2283 else
2284 {
2285 prologue_start = gdbarch_addr_bits_remove
2286 (gdbarch, return_value) - 8;
2287 prologue_end = prologue_start + 64; /* See above. */
2288 }
2289 }
2290
2291 if (prev_pc < prologue_end)
2292 prologue_end = prev_pc;
2293
2294 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2295 target_arm_instruction_reader ());
2296 }
2297
2298 static struct arm_prologue_cache *
2299 arm_make_prologue_cache (frame_info_ptr this_frame)
2300 {
2301 int reg;
2302 struct arm_prologue_cache *cache;
2303 CORE_ADDR unwound_fp, prev_sp;
2304
2305 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2306 arm_cache_init (cache, this_frame);
2307
2308 arm_scan_prologue (this_frame, cache);
2309
2310 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2311 if (unwound_fp == 0)
2312 return cache;
2313
2314 arm_gdbarch_tdep *tdep =
2315 gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2316
2317 prev_sp = unwound_fp + cache->framesize;
2318 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2319
2320 /* Calculate actual addresses of saved registers using offsets
2321 determined by arm_scan_prologue. */
2322 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2323 if (cache->saved_regs[reg].is_addr ())
2324 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () +
2325 prev_sp);
2326
2327 return cache;
2328 }
2329
2330 /* Implementation of the stop_reason hook for arm_prologue frames. */
2331
2332 static enum unwind_stop_reason
2333 arm_prologue_unwind_stop_reason (frame_info_ptr this_frame,
2334 void **this_cache)
2335 {
2336 struct arm_prologue_cache *cache;
2337 CORE_ADDR pc;
2338
2339 if (*this_cache == NULL)
2340 *this_cache = arm_make_prologue_cache (this_frame);
2341 cache = (struct arm_prologue_cache *) *this_cache;
2342
2343 /* This is meant to halt the backtrace at "_start". */
2344 pc = get_frame_pc (this_frame);
2345 gdbarch *arch = get_frame_arch (this_frame);
2346 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
2347 if (pc <= tdep->lowest_pc)
2348 return UNWIND_OUTERMOST;
2349
2350 /* If we've hit a wall, stop. */
2351 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2352 return UNWIND_OUTERMOST;
2353
2354 return UNWIND_NO_REASON;
2355 }
2356
2357 /* Our frame ID for a normal frame is the current function's starting PC
2358 and the caller's SP when we were called. */
2359
2360 static void
2361 arm_prologue_this_id (frame_info_ptr this_frame,
2362 void **this_cache,
2363 struct frame_id *this_id)
2364 {
2365 struct arm_prologue_cache *cache;
2366 struct frame_id id;
2367 CORE_ADDR pc, func;
2368
2369 if (*this_cache == NULL)
2370 *this_cache = arm_make_prologue_cache (this_frame);
2371 cache = (struct arm_prologue_cache *) *this_cache;
2372
2373 arm_gdbarch_tdep *tdep
2374 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2375
2376 /* Use function start address as part of the frame ID. If we cannot
2377 identify the start address (due to missing symbol information),
2378 fall back to just using the current PC. */
2379 pc = get_frame_pc (this_frame);
2380 func = get_frame_func (this_frame);
2381 if (!func)
2382 func = pc;
2383
2384 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2385 *this_id = id;
2386 }
2387
2388 static struct value *
2389 arm_prologue_prev_register (frame_info_ptr this_frame,
2390 void **this_cache,
2391 int prev_regnum)
2392 {
2393 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2394 struct arm_prologue_cache *cache;
2395 CORE_ADDR sp_value;
2396
2397 if (*this_cache == NULL)
2398 *this_cache = arm_make_prologue_cache (this_frame);
2399 cache = (struct arm_prologue_cache *) *this_cache;
2400
2401 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2402
2403 /* If this frame has signed the return address, mark it as so. */
2404 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2405 && *cache->ra_signed_state)
2406 set_frame_previous_pc_masked (this_frame);
2407
2408 /* If we are asked to unwind the PC, then we need to return the LR
2409 instead. The prologue may save PC, but it will point into this
2410 frame's prologue, not the next frame's resume location. Also
2411 strip the saved T bit. A valid LR may have the low bit set, but
2412 a valid PC never does. */
2413 if (prev_regnum == ARM_PC_REGNUM)
2414 {
2415 CORE_ADDR lr;
2416
2417 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2418 return frame_unwind_got_constant (this_frame, prev_regnum,
2419 arm_addr_bits_remove (gdbarch, lr));
2420 }
2421
2422 /* SP is generally not saved to the stack, but this frame is
2423 identified by the next frame's stack pointer at the time of the call.
2424 The value was already reconstructed into PREV_SP. */
2425 if (prev_regnum == ARM_SP_REGNUM)
2426 return frame_unwind_got_constant (this_frame, prev_regnum,
2427 arm_cache_get_prev_sp_value (cache, tdep));
2428
2429 /* The value might be one of the alternative SP, if so, use the
2430 value already constructed. */
2431 if (arm_is_alternative_sp_register (tdep, prev_regnum))
2432 {
2433 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2434 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2435 }
2436
2437 /* The CPSR may have been changed by the call instruction and by the
2438 called function. The only bit we can reconstruct is the T bit,
2439 by checking the low bit of LR as of the call. This is a reliable
2440 indicator of Thumb-ness except for some ARM v4T pre-interworking
2441 Thumb code, which could get away with a clear low bit as long as
2442 the called function did not use bx. Guess that all other
2443 bits are unchanged; the condition flags are presumably lost,
2444 but the processor status is likely valid. */
2445 if (prev_regnum == ARM_PS_REGNUM)
2446 {
2447 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2448 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2449
2450 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2451 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2452 }
2453
2454 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2455 prev_regnum);
2456 }
2457
2458 static frame_unwind arm_prologue_unwind = {
2459 "arm prologue",
2460 NORMAL_FRAME,
2461 arm_prologue_unwind_stop_reason,
2462 arm_prologue_this_id,
2463 arm_prologue_prev_register,
2464 NULL,
2465 default_frame_sniffer
2466 };
2467
2468 /* Maintain a list of ARM exception table entries per objfile, similar to the
2469 list of mapping symbols. We only cache entries for standard ARM-defined
2470 personality routines; the cache will contain only the frame unwinding
2471 instructions associated with the entry (not the descriptors). */
2472
2473 struct arm_exidx_entry
2474 {
2475 CORE_ADDR addr;
2476 gdb_byte *entry;
2477
2478 bool operator< (const arm_exidx_entry &other) const
2479 {
2480 return addr < other.addr;
2481 }
2482 };
2483
2484 struct arm_exidx_data
2485 {
2486 std::vector<std::vector<arm_exidx_entry>> section_maps;
2487 };
2488
2489 /* Per-BFD key to store exception handling information. */
2490 static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key;
2491
2492 static struct obj_section *
2493 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2494 {
2495 struct obj_section *osect;
2496
2497 ALL_OBJFILE_OSECTIONS (objfile, osect)
2498 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2499 {
2500 bfd_vma start, size;
2501 start = bfd_section_vma (osect->the_bfd_section);
2502 size = bfd_section_size (osect->the_bfd_section);
2503
2504 if (start <= vma && vma < start + size)
2505 return osect;
2506 }
2507
2508 return NULL;
2509 }
2510
2511 /* Parse contents of exception table and exception index sections
2512 of OBJFILE, and fill in the exception table entry cache.
2513
2514 For each entry that refers to a standard ARM-defined personality
2515 routine, extract the frame unwinding instructions (from either
2516 the index or the table section). The unwinding instructions
2517 are normalized by:
2518 - extracting them from the rest of the table data
2519 - converting to host endianness
2520 - appending the implicit 0xb0 ("Finish") code
2521
2522 The extracted and normalized instructions are stored for later
2523 retrieval by the arm_find_exidx_entry routine. */
2524
2525 static void
2526 arm_exidx_new_objfile (struct objfile *objfile)
2527 {
2528 struct arm_exidx_data *data;
2529 asection *exidx, *extab;
2530 bfd_vma exidx_vma = 0, extab_vma = 0;
2531 LONGEST i;
2532
2533 /* If we've already touched this file, do nothing. */
2534 if (!objfile || arm_exidx_data_key.get (objfile->obfd.get ()) != NULL)
2535 return;
2536
2537 /* Read contents of exception table and index. */
2538 exidx = bfd_get_section_by_name (objfile->obfd.get (),
2539 ELF_STRING_ARM_unwind);
2540 gdb::byte_vector exidx_data;
2541 if (exidx)
2542 {
2543 exidx_vma = bfd_section_vma (exidx);
2544 exidx_data.resize (bfd_section_size (exidx));
2545
2546 if (!bfd_get_section_contents (objfile->obfd.get (), exidx,
2547 exidx_data.data (), 0,
2548 exidx_data.size ()))
2549 return;
2550 }
2551
2552 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab");
2553 gdb::byte_vector extab_data;
2554 if (extab)
2555 {
2556 extab_vma = bfd_section_vma (extab);
2557 extab_data.resize (bfd_section_size (extab));
2558
2559 if (!bfd_get_section_contents (objfile->obfd.get (), extab,
2560 extab_data.data (), 0,
2561 extab_data.size ()))
2562 return;
2563 }
2564
2565 /* Allocate exception table data structure. */
2566 data = arm_exidx_data_key.emplace (objfile->obfd.get ());
2567 data->section_maps.resize (objfile->obfd->section_count);
2568
2569 /* Fill in exception table. */
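/* Each .ARM.exidx entry is a pair of 32-bit words: a prel31 offset to
   the start of the function, followed by either EXIDX_CANTUNWIND (1),
   an inline "short form" table entry (bit 31 set), or a prel31 offset
   to an entry in .ARM.extab. */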
2570 for (i = 0; i < exidx_data.size () / 8; i++)
2571 {
2572 struct arm_exidx_entry new_exidx_entry;
2573 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2574 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2575 exidx_data.data () + i * 8 + 4);
2576 bfd_vma addr = 0, word = 0;
2577 int n_bytes = 0, n_words = 0;
2578 struct obj_section *sec;
2579 gdb_byte *entry = NULL;
2580
2581 /* Extract address of start of function. */
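/* The offset is a 31-bit place-relative signed value (prel31); the
   XOR/subtract sequence below sign-extends it to 32 bits before the
   entry's own address is added. */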
2582 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2583 idx += exidx_vma + i * 8;
2584
2585 /* Find section containing function and compute section offset. */
2586 sec = arm_obj_section_from_vma (objfile, idx);
2587 if (sec == NULL)
2588 continue;
2589 idx -= bfd_section_vma (sec->the_bfd_section);
2590
2591 /* Determine address of exception table entry. */
2592 if (val == 1)
2593 {
2594 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2595 }
2596 else if ((val & 0xff000000) == 0x80000000)
2597 {
2598 /* Exception table entry embedded in .ARM.exidx
2599 -- must be short form. */
2600 word = val;
2601 n_bytes = 3;
2602 }
2603 else if (!(val & 0x80000000))
2604 {
2605 /* Exception table entry in .ARM.extab. */
2606 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2607 addr += exidx_vma + i * 8 + 4;
2608
2609 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2610 {
2611 word = bfd_h_get_32 (objfile->obfd,
2612 extab_data.data () + addr - extab_vma);
2613 addr += 4;
2614
2615 if ((word & 0xff000000) == 0x80000000)
2616 {
2617 /* Short form. */
2618 n_bytes = 3;
2619 }
2620 else if ((word & 0xff000000) == 0x81000000
2621 || (word & 0xff000000) == 0x82000000)
2622 {
2623 /* Long form. */
2624 n_bytes = 2;
2625 n_words = ((word >> 16) & 0xff);
2626 }
2627 else if (!(word & 0x80000000))
2628 {
2629 bfd_vma pers;
2630 struct obj_section *pers_sec;
2631 int gnu_personality = 0;
2632
2633 /* Custom personality routine. */
2634 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2635 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2636
2637 /* Check whether we've got one of the variants of the
2638 GNU personality routines. */
2639 pers_sec = arm_obj_section_from_vma (objfile, pers);
2640 if (pers_sec)
2641 {
2642 static const char *personality[] =
2643 {
2644 "__gcc_personality_v0",
2645 "__gxx_personality_v0",
2646 "__gcj_personality_v0",
2647 "__gnu_objc_personality_v0",
2648 NULL
2649 };
2650
2651 CORE_ADDR pc = pers + pers_sec->offset ();
2652 int k;
2653
2654 for (k = 0; personality[k]; k++)
2655 if (lookup_minimal_symbol_by_pc_name
2656 (pc, personality[k], objfile))
2657 {
2658 gnu_personality = 1;
2659 break;
2660 }
2661 }
2662
2663 /* If so, the next word contains a word count in the high
2664 byte, followed by the same unwind instructions as the
2665 pre-defined forms. */
2666 if (gnu_personality
2667 && addr + 4 <= extab_vma + extab_data.size ())
2668 {
2669 word = bfd_h_get_32 (objfile->obfd,
2670 (extab_data.data ()
2671 + addr - extab_vma));
2672 addr += 4;
2673 n_bytes = 3;
2674 n_words = ((word >> 24) & 0xff);
2675 }
2676 }
2677 }
2678 }
2679
2680 /* Sanity check address. */
2681 if (n_words)
2682 if (addr < extab_vma
2683 || addr + 4 * n_words > extab_vma + extab_data.size ())
2684 n_words = n_bytes = 0;
2685
2686 /* The unwind instructions reside in WORD (only the N_BYTES least
2687 significant bytes are valid), followed by N_WORDS words in the
2688 extab section starting at ADDR. */
2689 if (n_bytes || n_words)
2690 {
2691 gdb_byte *p = entry
2692 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2693 n_bytes + n_words * 4 + 1);
2694
2695 while (n_bytes--)
2696 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2697
2698 while (n_words--)
2699 {
2700 word = bfd_h_get_32 (objfile->obfd,
2701 extab_data.data () + addr - extab_vma);
2702 addr += 4;
2703
2704 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2705 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2706 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2707 *p++ = (gdb_byte) (word & 0xff);
2708 }
2709
2710 /* Implied "Finish" to terminate the list. */
2711 *p++ = 0xb0;
2712 }
2713
2714 /* Push the entry onto the vector. Entries are guaranteed to
2715 always appear in order of increasing addresses. */
2716 new_exidx_entry.addr = idx;
2717 new_exidx_entry.entry = entry;
2718 data->section_maps[sec->the_bfd_section->index].push_back
2719 (new_exidx_entry);
2720 }
2721 }
2722
2723 /* Search for the exception table entry covering MEMADDR. If one is found,
2724 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2725 set *START to the start of the region covered by this entry. */
2726
2727 static gdb_byte *
2728 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2729 {
2730 struct obj_section *sec;
2731
2732 sec = find_pc_section (memaddr);
2733 if (sec != NULL)
2734 {
2735 struct arm_exidx_data *data;
2736 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2737
2738 data = arm_exidx_data_key.get (sec->objfile->obfd.get ());
2739 if (data != NULL)
2740 {
2741 std::vector<arm_exidx_entry> &map
2742 = data->section_maps[sec->the_bfd_section->index];
2743 if (!map.empty ())
2744 {
2745 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2746
2747 /* std::lower_bound finds the earliest ordered insertion
2748 point. If the following entry starts at this exact
2749 address, we use that; otherwise, the preceding
2750 exception table entry covers this address. */
2751 if (idx < map.end ())
2752 {
2753 if (idx->addr == map_key.addr)
2754 {
2755 if (start)
2756 *start = idx->addr + sec->addr ();
2757 return idx->entry;
2758 }
2759 }
2760
2761 if (idx > map.begin ())
2762 {
2763 idx = idx - 1;
2764 if (start)
2765 *start = idx->addr + sec->addr ();
2766 return idx->entry;
2767 }
2768 }
2769 }
2770 }
2771
2772 return NULL;
2773 }
2774
2775 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2776 instruction list from the ARM exception table entry ENTRY, allocate and
2777 return a prologue cache structure describing how to unwind this frame.
2778
2779 Return NULL if the unwinding instruction list contains a "spare",
2780 "reserved" or "refuse to unwind" instruction as defined in section
2781 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2782 for the ARM Architecture" document. */
2783
2784 static struct arm_prologue_cache *
2785 arm_exidx_fill_cache (frame_info_ptr this_frame, gdb_byte *entry)
2786 {
2787 CORE_ADDR vsp = 0;
2788 int vsp_valid = 0;
2789
2790 struct arm_prologue_cache *cache;
2791 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2792 arm_cache_init (cache, this_frame);
2793
2794 for (;;)
2795 {
2796 gdb_byte insn;
2797
2798 /* Whenever we reload SP, we have to retrieve its actual value
2799 in the current frame. */
2800 if (!vsp_valid)
2801 {
2802 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2803 {
2804 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2805 vsp = get_frame_register_unsigned (this_frame, reg);
2806 }
2807 else
2808 {
2809 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2810 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2811 }
2812
2813 vsp_valid = 1;
2814 }
2815
2816 /* Decode next unwind instruction. */
2817 insn = *entry++;
2818
2819 if ((insn & 0xc0) == 0)
2820 {
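/* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4. */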
2821 int offset = insn & 0x3f;
2822 vsp += (offset << 2) + 4;
2823 }
2824 else if ((insn & 0xc0) == 0x40)
2825 {
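/* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4. */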
2826 int offset = insn & 0x3f;
2827 vsp -= (offset << 2) + 4;
2828 }
2829 else if ((insn & 0xf0) == 0x80)
2830 {
2831 int mask = ((insn & 0xf) << 8) | *entry++;
2832 int i;
2833
2834 /* The special case of an all-zero mask identifies
2835 "Refuse to unwind". We return NULL to fall back
2836 to the prologue analyzer. */
2837 if (mask == 0)
2838 return NULL;
2839
2840 /* Pop registers r4..r15 under mask. */
2841 for (i = 0; i < 12; i++)
2842 if (mask & (1 << i))
2843 {
2844 cache->saved_regs[4 + i].set_addr (vsp);
2845 vsp += 4;
2846 }
2847
2848 /* Special-case popping SP -- we need to reload vsp. */
2849 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2850 vsp_valid = 0;
2851 }
2852 else if ((insn & 0xf0) == 0x90)
2853 {
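/* 1001nnnn: vsp = r[nnnn] (nnnn == 13 or 15 is reserved). */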
2854 int reg = insn & 0xf;
2855
2856 /* Reserved cases. */
2857 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2858 return NULL;
2859
2860 /* Set SP from another register and mark VSP for reload. */
2861 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2862 vsp_valid = 0;
2863 }
2864 else if ((insn & 0xf0) == 0xa0)
2865 {
2866 int count = insn & 0x7;
2867 int pop_lr = (insn & 0x8) != 0;
2868 int i;
2869
2870 /* Pop r4..r[4+count]. */
2871 for (i = 0; i <= count; i++)
2872 {
2873 cache->saved_regs[4 + i].set_addr (vsp);
2874 vsp += 4;
2875 }
2876
2877 /* If indicated by flag, pop LR as well. */
2878 if (pop_lr)
2879 {
2880 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2881 vsp += 4;
2882 }
2883 }
2884 else if (insn == 0xb0)
2885 {
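/* 10110000: Finish -- end of the unwind instruction sequence. */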
2886 /* We could only have updated PC by popping into it; if so, it
2887 will show up as an address. Otherwise, copy LR into PC. */
2888 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2889 cache->saved_regs[ARM_PC_REGNUM]
2890 = cache->saved_regs[ARM_LR_REGNUM];
2891
2892 /* We're done. */
2893 break;
2894 }
2895 else if (insn == 0xb1)
2896 {
2897 int mask = *entry++;
2898 int i;
2899
2900 /* An all-zero mask or a mask >= 16 is "spare". */
2901 if (mask == 0 || mask >= 16)
2902 return NULL;
2903
2904 /* Pop r0..r3 under mask. */
2905 for (i = 0; i < 4; i++)
2906 if (mask & (1 << i))
2907 {
2908 cache->saved_regs[i].set_addr (vsp);
2909 vsp += 4;
2910 }
2911 }
2912 else if (insn == 0xb2)
2913 {
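/* 10110010 uleb128: vsp = vsp + 0x204 + (uleb128 << 2). The offset
   is encoded as a ULEB128 value following the opcode. */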
2914 ULONGEST offset = 0;
2915 unsigned shift = 0;
2916
2917 do
2918 {
2919 offset |= (*entry & 0x7f) << shift;
2920 shift += 7;
2921 }
2922 while (*entry++ & 0x80);
2923
2924 vsp += 0x204 + (offset << 2);
2925 }
2926 else if (insn == 0xb3)
2927 {
2928 int start = *entry >> 4;
2929 int count = (*entry++) & 0xf;
2930 int i;
2931
2932 /* Only registers D0..D15 are valid here. */
2933 if (start + count >= 16)
2934 return NULL;
2935
2936 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2937 for (i = 0; i <= count; i++)
2938 {
2939 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2940 vsp += 8;
2941 }
2942
2943 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2944 vsp += 4;
2945 }
2946 else if ((insn & 0xf8) == 0xb8)
2947 {
2948 int count = insn & 0x7;
2949 int i;
2950
2951 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2952 for (i = 0; i <= count; i++)
2953 {
2954 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2955 vsp += 8;
2956 }
2957
2958 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2959 vsp += 4;
2960 }
2961 else if (insn == 0xc6)
2962 {
2963 int start = *entry >> 4;
2964 int count = (*entry++) & 0xf;
2965 int i;
2966
2967 /* Only registers WR0..WR15 are valid. */
2968 if (start + count >= 16)
2969 return NULL;
2970
2971 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2972 for (i = 0; i <= count; i++)
2973 {
2974 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2975 vsp += 8;
2976 }
2977 }
2978 else if (insn == 0xc7)
2979 {
2980 int mask = *entry++;
2981 int i;
2982
2983 /* An all-zero mask or a mask >= 16 is "spare". */
2984 if (mask == 0 || mask >= 16)
2985 return NULL;
2986
2987 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2988 for (i = 0; i < 4; i++)
2989 if (mask & (1 << i))
2990 {
2991 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2992 vsp += 4;
2993 }
2994 }
2995 else if ((insn & 0xf8) == 0xc0)
2996 {
2997 int count = insn & 0x7;
2998 int i;
2999
3000 /* Pop iwmmx registers WR[10]..WR[10+count]. */
3001 for (i = 0; i <= count; i++)
3002 {
3003 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
3004 vsp += 8;
3005 }
3006 }
3007 else if (insn == 0xc8)
3008 {
3009 int start = *entry >> 4;
3010 int count = (*entry++) & 0xf;
3011 int i;
3012
3013 /* Only registers D0..D31 are valid. */
3014 if (start + count >= 16)
3015 return NULL;
3016
3017 /* Pop VFP double-precision registers
3018 D[16+start]..D[16+start+count]. */
3019 for (i = 0; i <= count; i++)
3020 {
3021 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
3022 vsp += 8;
3023 }
3024 }
3025 else if (insn == 0xc9)
3026 {
3027 int start = *entry >> 4;
3028 int count = (*entry++) & 0xf;
3029 int i;
3030
3031 /* Pop VFP double-precision registers D[start]..D[start+count]. */
3032 for (i = 0; i <= count; i++)
3033 {
3034 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
3035 vsp += 8;
3036 }
3037 }
3038 else if ((insn & 0xf8) == 0xd0)
3039 {
3040 int count = insn & 0x7;
3041 int i;
3042
3043 /* Pop VFP double-precision registers D[8]..D[8+count]. */
3044 for (i = 0; i <= count; i++)
3045 {
3046 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
3047 vsp += 8;
3048 }
3049 }
3050 else
3051 {
3052 /* Everything else is "spare". */
3053 return NULL;
3054 }
3055 }
3056
3057 /* If we restore SP from a register, assume this was the frame register.
3058 Otherwise just fall back to SP as frame register. */
3059 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
3060 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
3061 else
3062 cache->framereg = ARM_SP_REGNUM;
3063
3064 /* Determine offset to previous frame. */
3065 cache->framesize
3066 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
3067
3068 /* We already got the previous SP. */
3069 arm_gdbarch_tdep *tdep
3070 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3071 arm_cache_set_active_sp_value (cache, tdep, vsp);
3072
3073 return cache;
3074 }
3075
3076 /* Unwinding via ARM exception table entries. Note that the sniffer
3077 already computes a filled-in prologue cache, which is then used
3078 with the same arm_prologue_this_id and arm_prologue_prev_register
3079 routines also used for prologue-parsing based unwinding. */
3080
3081 static int
3082 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3083 frame_info_ptr this_frame,
3084 void **this_prologue_cache)
3085 {
3086 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3087 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3088 CORE_ADDR addr_in_block, exidx_region, func_start;
3089 struct arm_prologue_cache *cache;
3090 gdb_byte *entry;
3091
3092 /* See if we have an ARM exception table entry covering this address. */
3093 addr_in_block = get_frame_address_in_block (this_frame);
3094 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3095 if (!entry)
3096 return 0;
3097
3098 /* The ARM exception table does not describe unwind information
3099 for arbitrary PC values, but is guaranteed to be correct only
3100 at call sites. We have to decide here whether we want to use
3101 ARM exception table information for this frame, or fall back
3102 to using prologue parsing. (Note that if we have DWARF CFI,
3103 this sniffer isn't even called -- CFI is always preferred.)
3104
3105 Before we make this decision, however, we check whether we
3106 actually have *symbol* information for the current frame.
3107 If not, prologue parsing would not work anyway, so we might
3108 as well use the exception table and hope for the best. */
3109 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3110 {
3111 int exc_valid = 0;
3112
3113 /* If the next frame is "normal", we are at a call site in this
3114 frame, so exception information is guaranteed to be valid. */
3115 if (get_next_frame (this_frame)
3116 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3117 exc_valid = 1;
3118
3119 /* We also assume exception information is valid if we're currently
3120 blocked in a system call. The system library is supposed to
3121 ensure this, so that e.g. pthread cancellation works. */
3122 if (arm_frame_is_thumb (this_frame))
3123 {
3124 ULONGEST insn;
3125
3126 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
3127 2, byte_order_for_code, &insn)
3128 && (insn & 0xff00) == 0xdf00 /* svc */)
3129 exc_valid = 1;
3130 }
3131 else
3132 {
3133 ULONGEST insn;
3134
3135 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
3136 4, byte_order_for_code, &insn)
3137 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3138 exc_valid = 1;
3139 }
3140
3141 /* Bail out if we don't know that exception information is valid. */
3142 if (!exc_valid)
3143 return 0;
3144
3145 /* The ARM exception index does not mark the *end* of the region
3146 covered by the entry, and some functions will not have any entry.
3147 To correctly recognize the end of the covered region, the linker
3148 should have inserted dummy records with a CANTUNWIND marker.
3149
3150 Unfortunately, current versions of GNU ld do not reliably do
3151 this, and thus we may have found an incorrect entry above.
3152 As a (temporary) sanity check, we only use the entry if it
3153 lies *within* the bounds of the function. Note that this check
3154 might reject perfectly valid entries that just happen to cover
3155 multiple functions; therefore this check ought to be removed
3156 once the linker is fixed. */
3157 if (func_start > exidx_region)
3158 return 0;
3159 }
3160
3161 /* Decode the list of unwinding instructions into a prologue cache.
3162 Note that this may fail due to e.g. a "refuse to unwind" code. */
3163 cache = arm_exidx_fill_cache (this_frame, entry);
3164 if (!cache)
3165 return 0;
3166
3167 *this_prologue_cache = cache;
3168 return 1;
3169 }
3170
3171 struct frame_unwind arm_exidx_unwind = {
3172 "arm exidx",
3173 NORMAL_FRAME,
3174 default_frame_unwind_stop_reason,
3175 arm_prologue_this_id,
3176 arm_prologue_prev_register,
3177 NULL,
3178 arm_exidx_unwind_sniffer
3179 };
3180
3181 static struct arm_prologue_cache *
3182 arm_make_epilogue_frame_cache (frame_info_ptr this_frame)
3183 {
3184 struct arm_prologue_cache *cache;
3185 int reg;
3186
3187 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3188 arm_cache_init (cache, this_frame);
3189
3190 /* Still rely on the offset calculated from prologue. */
3191 arm_scan_prologue (this_frame, cache);
3192
3193 /* Since we are in epilogue, the SP has been restored. */
3194 arm_gdbarch_tdep *tdep
3195 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3196 arm_cache_set_active_sp_value (cache, tdep,
3197 get_frame_register_unsigned (this_frame,
3198 ARM_SP_REGNUM));
3199
3200 /* Calculate actual addresses of saved registers using offsets
3201 determined by arm_scan_prologue. */
3202 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3203 if (cache->saved_regs[reg].is_addr ())
3204 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3205 + arm_cache_get_prev_sp_value (cache, tdep));
3206
3207 return cache;
3208 }
3209
3210 /* Implementation of function hook 'this_id' in
3211 'struct frame_unwind' for epilogue unwinder. */
3212
3213 static void
3214 arm_epilogue_frame_this_id (frame_info_ptr this_frame,
3215 void **this_cache,
3216 struct frame_id *this_id)
3217 {
3218 struct arm_prologue_cache *cache;
3219 CORE_ADDR pc, func;
3220
3221 if (*this_cache == NULL)
3222 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3223 cache = (struct arm_prologue_cache *) *this_cache;
3224
3225 /* Use function start address as part of the frame ID. If we cannot
3226 identify the start address (due to missing symbol information),
3227 fall back to just using the current PC. */
3228 pc = get_frame_pc (this_frame);
3229 func = get_frame_func (this_frame);
3230 if (func == 0)
3231 func = pc;
3232
3233 arm_gdbarch_tdep *tdep
3234 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3235 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3236 }
3237
3238 /* Implementation of function hook 'prev_register' in
3239 'struct frame_unwind' for epilogue unwinder. */
3240
3241 static struct value *
3242 arm_epilogue_frame_prev_register (frame_info_ptr this_frame,
3243 void **this_cache, int regnum)
3244 {
3245 if (*this_cache == NULL)
3246 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3247
3248 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3249 }
3250
3251 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3252 CORE_ADDR pc);
3253 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3254 CORE_ADDR pc);
3255
3256 /* Implementation of function hook 'sniffer' in
3257 'struct frame_unwind' for epilogue unwinder. */
3258
3259 static int
3260 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3261 frame_info_ptr this_frame,
3262 void **this_prologue_cache)
3263 {
3264 if (frame_relative_level (this_frame) == 0)
3265 {
3266 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3267 CORE_ADDR pc = get_frame_pc (this_frame);
3268
3269 if (arm_frame_is_thumb (this_frame))
3270 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3271 else
3272 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3273 }
3274 else
3275 return 0;
3276 }
3277
3278 /* Frame unwinder from epilogue. */
3279
3280 static const struct frame_unwind arm_epilogue_frame_unwind =
3281 {
3282 "arm epilogue",
3283 NORMAL_FRAME,
3284 default_frame_unwind_stop_reason,
3285 arm_epilogue_frame_this_id,
3286 arm_epilogue_frame_prev_register,
3287 NULL,
3288 arm_epilogue_frame_sniffer,
3289 };
3290
3291 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3292 trampoline, return the target PC. Otherwise return 0.
3293
3294 void call0a (char c, short s, int i, long l) {}
3295
3296 int main (void)
3297 {
3298 (*pointer_to_call0a) (c, s, i, l);
3299 }
3300
3301 Instead of calling a stub library function _call_via_xx (xx is
3302 the register name), GCC may inline the trampoline in the object
3303 file as below (register r2 has the address of call0a).
3304
3305 .global main
3306 .type main, %function
3307 ...
3308 bl .L1
3309 ...
3310 .size main, .-main
3311
3312 .L1:
3313 bx r2
3314
3315 The trampoline 'bx r2' doesn't belong to main. */
3316
3317 static CORE_ADDR
3318 arm_skip_bx_reg (frame_info_ptr frame, CORE_ADDR pc)
3319 {
3320 /* The heuristic for recognizing such a trampoline is that FRAME is
3321 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3322 if (arm_frame_is_thumb (frame))
3323 {
3324 gdb_byte buf[2];
3325
3326 if (target_read_memory (pc, buf, 2) == 0)
3327 {
3328 struct gdbarch *gdbarch = get_frame_arch (frame);
3329 enum bfd_endian byte_order_for_code
3330 = gdbarch_byte_order_for_code (gdbarch);
3331 uint16_t insn
3332 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3333
3334 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3335 {
3336 CORE_ADDR dest
3337 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3338
3339 /* Clear the LSB so that gdb core sets step-resume
3340 breakpoint at the right address. */
3341 return UNMAKE_THUMB_ADDR (dest);
3342 }
3343 }
3344 }
3345
3346 return 0;
3347 }
3348
3349 static struct arm_prologue_cache *
3350 arm_make_stub_cache (frame_info_ptr this_frame)
3351 {
3352 struct arm_prologue_cache *cache;
3353
3354 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3355 arm_cache_init (cache, this_frame);
3356
3357 arm_gdbarch_tdep *tdep
3358 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3359 arm_cache_set_active_sp_value (cache, tdep,
3360 get_frame_register_unsigned (this_frame,
3361 ARM_SP_REGNUM));
3362
3363 return cache;
3364 }
3365
3366 /* Our frame ID for a stub frame is the current SP and LR. */
3367
3368 static void
3369 arm_stub_this_id (frame_info_ptr this_frame,
3370 void **this_cache,
3371 struct frame_id *this_id)
3372 {
3373 struct arm_prologue_cache *cache;
3374
3375 if (*this_cache == NULL)
3376 *this_cache = arm_make_stub_cache (this_frame);
3377 cache = (struct arm_prologue_cache *) *this_cache;
3378
3379 arm_gdbarch_tdep *tdep
3380 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3381 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3382 get_frame_pc (this_frame));
3383 }
3384
3385 static int
3386 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3387 frame_info_ptr this_frame,
3388 void **this_prologue_cache)
3389 {
3390 CORE_ADDR addr_in_block;
3391 gdb_byte dummy[4];
3392 CORE_ADDR pc, start_addr;
3393 const char *name;
3394
3395 addr_in_block = get_frame_address_in_block (this_frame);
3396 pc = get_frame_pc (this_frame);
3397 if (in_plt_section (addr_in_block)
3398 /* We also use the stub unwinder if the target memory is unreadable
3399 to avoid having the prologue unwinder trying to read it. */
3400 || target_read_memory (pc, dummy, 4) != 0)
3401 return 1;
3402
3403 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3404 && arm_skip_bx_reg (this_frame, pc) != 0)
3405 return 1;
3406
3407 return 0;
3408 }
3409
3410 struct frame_unwind arm_stub_unwind = {
3411 "arm stub",
3412 NORMAL_FRAME,
3413 default_frame_unwind_stop_reason,
3414 arm_stub_this_id,
3415 arm_prologue_prev_register,
3416 NULL,
3417 arm_stub_unwind_sniffer
3418 };
3419
3420 /* Put here the code to store, into CACHE->saved_regs, the addresses
3421 of the saved registers of frame described by THIS_FRAME. CACHE is
3422 returned. */
3423
3424 static struct arm_prologue_cache *
3425 arm_m_exception_cache (frame_info_ptr this_frame)
3426 {
3427 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3428 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3429 struct arm_prologue_cache *cache;
3430
3431 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3432 arm_cache_init (cache, this_frame);
3433
3434 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3435 describes which bits in LR define which stack was used prior to the
3436 exception and whether the FPU is used (causing an extended stack frame). */
3437
3438 /* In the lockup state PC contains a lockup magic value.
3439 The PC value of the next outer frame is irreversibly
3440 lost. The other registers are intact, so LR likely contains
3441 the PC of some frame next to the outer one, but we cannot
3442 analyze the next outer frame without knowing its PC, and
3443 therefore we do not know the SP fixup for this frame.
3444 Some heuristics to resynchronize SP might be possible.
3445 For simplicity, just terminate the unwinding to prevent it going
3446 astray and attempting to read data/addresses it shouldn't,
3447 which may cause further issues due to side-effects. */
3448 CORE_ADDR pc = get_frame_pc (this_frame);
3449 if (arm_m_addr_is_lockup (pc))
3450 {
3451 /* The lockup can only be real in the innermost frame, since
3452 the CPU is stopped and cannot create more frames. If we hit
3453 the lockup magic PC in an outer frame, it is just a sentinel
3454 at the top of the stack: do not warn then. */
3455 if (frame_relative_level (this_frame) == 0)
3456 warning (_("ARM M in lockup state, stack unwinding terminated."));
3457
3458 /* Terminate any further stack unwinding. */
3459 arm_cache_set_active_sp_value (cache, tdep, 0);
3460 return cache;
3461 }
3462
3463 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3464
3465 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3466 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3467 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3468 reset if Main Extension is implemented, otherwise the value is unknown. */
3469 if (lr == 0xffffffff)
3470 {
3471 /* Terminate any further stack unwinding. */
3472 arm_cache_set_active_sp_value (cache, tdep, 0);
3473 return cache;
3474 }
3475
3476 /* Check FNC_RETURN indicator bits (24-31). */
3477 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3478 if (fnc_return)
3479 {
3480 /* FNC_RETURN is only valid for targets with Security Extension. */
3481 if (!tdep->have_sec_ext)
3482 {
3483 error (_("While unwinding an exception frame, found unexpected Link "
3484 "Register value %s that requires the security extension, "
3485 "but the extension was not found or is disabled. This "
3486 "should not happen and may be caused by corrupt data or a "
3487 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE));
3488 }
3489
3490 if (!arm_unwind_secure_frames)
3491 {
3492 warning (_("Non-secure to secure stack unwinding disabled."));
3493
3494 /* Terminate any further stack unwinding. */
3495 arm_cache_set_active_sp_value (cache, tdep, 0);
3496 return cache;
3497 }
3498
3499 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
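/* The low nine bits of xPSR hold the IPSR exception number; a
   non-zero value means the processor is in handler mode. */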
3500 if ((xpsr & 0x1ff) != 0)
3501 /* Handler mode: This is the mode that exceptions are handled in. */
3502 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3503 else
3504 /* Thread mode: This is the normal mode that programs run in. */
3505 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3506
3507 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3508
3509 /* Stack layout for a function call from Secure to Non-Secure state
3510 (ARMv8-M section B3.16):
3511
3512 SP Offset
3513
3514 +-------------------+
3515 0x08 | |
3516 +-------------------+ <-- Original SP
3517 0x04 | Partial xPSR |
3518 +-------------------+
3519 0x00 | Return Address |
3520 +===================+ <-- New SP */
3521
3522 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3523 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3524 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3525
3526 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3527
3528 return cache;
3529 }
3530
3531 /* Check EXC_RETURN indicator bits (24-31). */
3532 bool exc_return = (((lr >> 24) & 0xff) == 0xff);
3533 if (exc_return)
3534 {
3535 int sp_regnum;
3536 bool secure_stack_used = false;
3537 bool default_callee_register_stacking = false;
3538 bool exception_domain_is_secure = false;
3539 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3540
3541 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3542 bool process_stack_used = (bit (lr, 2) != 0);
3543
3544 if (tdep->have_sec_ext)
3545 {
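/* EXC_RETURN bit 6 (S) indicates whether the frame was pushed to a
   secure stack, bit 5 (DCRS) whether default callee register
   stacking applies, and bit 0 (ES) the security domain the
   exception was taken to. */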
3546 secure_stack_used = (bit (lr, 6) != 0);
3547 default_callee_register_stacking = (bit (lr, 5) != 0);
3548 exception_domain_is_secure = (bit (lr, 0) != 0);
3549
3550 /* Unwinding from non-secure to secure can trip security
3551 measures. In order to avoid the debugger being
3552 intrusive, rely on the user to configure the requested
3553 mode. */
3554 if (secure_stack_used && !exception_domain_is_secure
3555 && !arm_unwind_secure_frames)
3556 {
3557 warning (_("Non-secure to secure stack unwinding disabled."));
3558
3559 /* Terminate any further stack unwinding. */
3560 arm_cache_set_active_sp_value (cache, tdep, 0);
3561 return cache;
3562 }
3563
3564 if (process_stack_used)
3565 {
3566 if (secure_stack_used)
3567 /* Secure thread (process) stack used, use PSP_S as SP. */
3568 sp_regnum = tdep->m_profile_psp_s_regnum;
3569 else
3570 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3571 sp_regnum = tdep->m_profile_psp_ns_regnum;
3572 }
3573 else
3574 {
3575 if (secure_stack_used)
3576 /* Secure main stack used, use MSP_S as SP. */
3577 sp_regnum = tdep->m_profile_msp_s_regnum;
3578 else
3579 /* Non-secure main stack used, use MSP_NS as SP. */
3580 sp_regnum = tdep->m_profile_msp_ns_regnum;
3581 }
3582 }
3583 else
3584 {
3585 if (process_stack_used)
3586 /* Thread (process) stack used, use PSP as SP. */
3587 sp_regnum = tdep->m_profile_psp_regnum;
3588 else
3589 /* Main stack used, use MSP as SP. */
3590 sp_regnum = tdep->m_profile_msp_regnum;
3591 }
3592
3593 /* Set the active SP regnum. */
3594 arm_cache_switch_prev_sp (cache, tdep, sp_regnum);
3595
3596 /* Fetch the SP to use for this frame. */
3597 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3598
3599 /* Exception entry context stacking is described in ARMv8-M (section
3600 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference
3601 Manuals.
3602
3603 The following figure shows the structure of the stack frame when
3604 Security and Floating-point extensions are present.
3605
3606 SP Offsets
3607 Without With
3608 Callee Regs Callee Regs
3609 (Secure -> Non-Secure)
3610 +-------------------+
3611 0xA8 | | 0xD0
3612 +===================+ --+ <-- Original SP
3613 0xA4 | S31 | 0xCC |
3614 +-------------------+ |
3615 ... | Additional FP context
3616 +-------------------+ |
3617 0x68 | S16 | 0x90 |
3618 +===================+ --+
3619 0x64 | Reserved | 0x8C |
3620 +-------------------+ |
3621 0x60 | FPSCR | 0x88 |
3622 +-------------------+ |
3623 0x5C | S15 | 0x84 | FP context
3624 +-------------------+ |
3625 ... |
3626 +-------------------+ |
3627 0x20 | S0 | 0x48 |
3628 +===================+ --+
3629 0x1C | xPSR | 0x44 |
3630 +-------------------+ |
3631 0x18 | Return address | 0x40 |
3632 +-------------------+ |
3633 0x14 | LR(R14) | 0x3C |
3634 +-------------------+ |
3635 0x10 | R12 | 0x38 | State context
3636 +-------------------+ |
3637 0x0C | R3 | 0x34 |
3638 +-------------------+ |
3639 ... |
3640 +-------------------+ |
3641 0x00 | R0 | 0x28 |
3642 +===================+ --+
3643 | R11 | 0x24 |
3644 +-------------------+ |
3645 ... |
3646 +-------------------+ | Additional state
3647 | R4 | 0x08 | context when
3648 +-------------------+ | transitioning from
3649 | Reserved | 0x04 | Secure to Non-Secure
3650 +-------------------+ |
3651 | Magic signature | 0x00 |
3652 +===================+ --+ <-- New SP */
3653
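/* SP_R0_OFFSET is the offset from the unwound SP to the stacked R0; it is
non-zero when the additional (callee-saved) state context shown above is
present. */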
3654 uint32_t sp_r0_offset = 0;
3655
3656 /* With the Security extension, the hardware saves R4..R11 too. */
3657 if (tdep->have_sec_ext && secure_stack_used
3658 && (!default_callee_register_stacking || !exception_domain_is_secure))
3659 {
3660 /* Record the stack addresses of the callee-saved registers R4..R11. */
3661 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3662 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3663 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3664 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3665 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3666 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3667 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3668 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3669 sp_r0_offset = 0x28;
3670 }
3671
3672 /* The hardware saves eight 32-bit words, comprising xPSR,
3673 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3674 "B1.5.6 Exception entry behavior" in
3675 "ARMv7-M Architecture Reference Manual". */
3676 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3677 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3678 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3679 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3680 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset
3681 + 0x10);
3682 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset
3683 + 0x14);
3684 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset
3685 + 0x18);
3686 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset
3687 + 0x1C);
3688
3689 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3690 type used. */
3691 bool extended_frame_used = (bit (lr, 4) == 0);
3692 if (extended_frame_used)
3693 {
3694 ULONGEST fpccr;
3695 ULONGEST fpcar;
3696
3697 /* Read FPCCR register. */
3698 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE,
3699 byte_order, &fpccr))
3700 {
3701 warning (_("Could not fetch required FPCCR content. Further "
3702 "unwinding is impossible."));
3703 arm_cache_set_active_sp_value (cache, tdep, 0);
3704 return cache;
3705 }
3706
3707 /* Read FPCAR register. */
3708 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE,
3709 byte_order, &fpcar))
3710 {
3711 warning (_("Could not fetch FPCAR content. Further unwinding of "
3712 "FP register values will be unreliable."));
3713 fpcar = 0;
3714 }
3715
3716 bool fpccr_aspen = bit (fpccr, 31);
3717 bool fpccr_lspen = bit (fpccr, 30);
3718 bool fpccr_ts = bit (fpccr, 26);
3719 bool fpccr_lspact = bit (fpccr, 0);
3720
3721 /* The LSPEN and ASPEN bits indicate whether lazy state preservation
3722 for the FP registers is enabled or disabled. The LSPACT bit indicates,
3723 together with FPCAR, whether the lazy state preservation feature is
3724 active for the current frame or for another frame.
3725 See "Lazy context save of FP state" in B1.5.7, and ARM AN298 (which
3726 covers this for the Cortex-M4F), for details. */
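/* FPCAR holds the address of the FP context area reserved on the exception
stack frame; its low three bits are reserved, and offset 0x20 from the
stacked R0 is where S0 lives in this frame. */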
3727 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20)
3728 == (fpcar & ~0x7));
3729 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen
3730 && fpccr_lspact
3731 && fpcar_points_to_this_frame));
3732
3733 /* Extended stack frame type used. */
3734 if (read_fp_regs_from_stack)
3735 {
3736 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20;
3737 for (int i = 0; i < 8; i++)
3738 {
3739 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3740 addr += 8;
3741 }
3742 }
3743 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp
3744 + sp_r0_offset + 0x60);
3745
3746 if (tdep->have_sec_ext && !default_callee_register_stacking
3747 && fpccr_ts)
3748 {
3749 /* Handle floating-point callee saved registers. */
3750 if (read_fp_regs_from_stack)
3751 {
3752 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68;
3753 for (int i = 8; i < 16; i++)
3754 {
3755 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3756 addr += 8;
3757 }
3758 }
3759
3760 arm_cache_set_active_sp_value (cache, tdep,
3761 unwound_sp + sp_r0_offset + 0xA8);
3762 }
3763 else
3764 {
3765 /* Offset 0x64 is reserved. */
3766 arm_cache_set_active_sp_value (cache, tdep,
3767 unwound_sp + sp_r0_offset + 0x68);
3768 }
3769 }
3770 else
3771 {
3772 /* Standard stack frame type used. */
3773 arm_cache_set_active_sp_value (cache, tdep,
3774 unwound_sp + sp_r0_offset + 0x20);
3775 }
3776
3777 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3778 aligner between the top of the 32-byte stack frame and the
3779 previous context's stack pointer. */
3780 ULONGEST xpsr;
3781 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM]
3782 .addr (), ARM_INT_REGISTER_SIZE,
3783 byte_order, &xpsr))
3784 {
3785 warning (_("Could not fetch required XPSR content. Further "
3786 "unwinding is impossible."));
3787 arm_cache_set_active_sp_value (cache, tdep, 0);
3788 return cache;
3789 }
3790
3791 if (bit (xpsr, 9) != 0)
3792 {
3793 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4;
3794 arm_cache_set_active_sp_value (cache, tdep, new_sp);
3795 }
3796
3797 return cache;
3798 }
3799
3800 internal_error (_("While unwinding an exception frame, "
3801 "found unexpected Link Register value "
3802 "%s. This should not happen and may "
3803 "be caused by corrupt data or a bug in"
3804 " GDB."),
3805 phex (lr, ARM_INT_REGISTER_SIZE));
3806 }
3807
3808 /* Implementation of the stop_reason hook for arm_m_exception frames. */
3809
3810 static enum unwind_stop_reason
3811 arm_m_exception_frame_unwind_stop_reason (frame_info_ptr this_frame,
3812 void **this_cache)
3813 {
3814 struct arm_prologue_cache *cache;
3815 arm_gdbarch_tdep *tdep
3816 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3817
3818 if (*this_cache == NULL)
3819 *this_cache = arm_m_exception_cache (this_frame);
3820 cache = (struct arm_prologue_cache *) *this_cache;
3821
3822 /* If we've hit a wall, stop. */
3823 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
3824 return UNWIND_OUTERMOST;
3825
3826 return UNWIND_NO_REASON;
3827 }
3828
3829 /* Implementation of function hook 'this_id' in
3830 'struct frame_unwind'. */
3831
3832 static void
3833 arm_m_exception_this_id (frame_info_ptr this_frame,
3834 void **this_cache,
3835 struct frame_id *this_id)
3836 {
3837 struct arm_prologue_cache *cache;
3838
3839 if (*this_cache == NULL)
3840 *this_cache = arm_m_exception_cache (this_frame);
3841 cache = (struct arm_prologue_cache *) *this_cache;
3842
3843 /* Our frame ID for an exception frame is the unwound SP and the PC of this frame. */
3844 arm_gdbarch_tdep *tdep
3845 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3846 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3847 get_frame_pc (this_frame));
3848 }
3849
3850 /* Implementation of function hook 'prev_register' in
3851 'struct frame_unwind'. */
3852
3853 static struct value *
3854 arm_m_exception_prev_register (frame_info_ptr this_frame,
3855 void **this_cache,
3856 int prev_regnum)
3857 {
3858 struct arm_prologue_cache *cache;
3859 CORE_ADDR sp_value;
3860
3861 if (*this_cache == NULL)
3862 *this_cache = arm_m_exception_cache (this_frame);
3863 cache = (struct arm_prologue_cache *) *this_cache;
3864
3865 /* The value was already reconstructed into PREV_SP. */
3866 arm_gdbarch_tdep *tdep
3867 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3868 if (prev_regnum == ARM_SP_REGNUM)
3869 return frame_unwind_got_constant (this_frame, prev_regnum,
3870 arm_cache_get_prev_sp_value (cache, tdep));
3871
3872 /* If we are asked to unwind the PC, strip the saved T bit. */
3873 if (prev_regnum == ARM_PC_REGNUM)
3874 {
3875 struct value *value = trad_frame_get_prev_register (this_frame,
3876 cache->saved_regs,
3877 prev_regnum);
3878 CORE_ADDR pc = value_as_address (value);
3879 return frame_unwind_got_constant (this_frame, prev_regnum,
3880 UNMAKE_THUMB_ADDR (pc));
3881 }
3882
3883 /* The value might be one of the alternative SP registers; if so, use
3884 the value already constructed. */
3885 if (arm_is_alternative_sp_register (tdep, prev_regnum))
3886 {
3887 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3888 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3889 }
3890
3891 /* If we are asked to unwind the xPSR, set the T bit if the PC is in Thumb
3892 mode. The LR register is unreliable here, as it contains the FNC_RETURN
3893 or EXC_RETURN pattern. */
3894 if (prev_regnum == ARM_PS_REGNUM)
3895 {
3896 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3897 struct value *value = trad_frame_get_prev_register (this_frame,
3898 cache->saved_regs,
3899 ARM_PC_REGNUM);
3900 CORE_ADDR pc = value_as_address (value);
3901 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3902 ARM_PS_REGNUM);
3903 ULONGEST xpsr = value_as_long (value);
3904
3905 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3906 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3907 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3908 }
3909
3910 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3911 prev_regnum);
3912 }
3913
3914 /* Implementation of function hook 'sniffer' in
3915 'struct frame_unwind'. */
3916
3917 static int
3918 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3919 frame_info_ptr this_frame,
3920 void **this_prologue_cache)
3921 {
3922 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3923 CORE_ADDR this_pc = get_frame_pc (this_frame);
3924
3925 /* No need to check is_m; this sniffer is only registered for
3926 M-profile architectures. */
3927
3928 /* Check if exception frame returns to a magic PC value. */
3929 return arm_m_addr_is_magic (gdbarch, this_pc);
3930 }
3931
3932 /* Frame unwinder for M-profile exceptions (EXC_RETURN on stack),
3933 lockup and secure/nonsecure interstate function calls (FNC_RETURN). */
3934
3935 struct frame_unwind arm_m_exception_unwind =
3936 {
3937 "arm m exception lockup sec_fnc",
3938 SIGTRAMP_FRAME,
3939 arm_m_exception_frame_unwind_stop_reason,
3940 arm_m_exception_this_id,
3941 arm_m_exception_prev_register,
3942 NULL,
3943 arm_m_exception_unwind_sniffer
3944 };
3945
3946 static CORE_ADDR
3947 arm_normal_frame_base (frame_info_ptr this_frame, void **this_cache)
3948 {
3949 struct arm_prologue_cache *cache;
3950
3951 if (*this_cache == NULL)
3952 *this_cache = arm_make_prologue_cache (this_frame);
3953 cache = (struct arm_prologue_cache *) *this_cache;
3954
3955 arm_gdbarch_tdep *tdep
3956 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3957 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3958 }
3959
3960 struct frame_base arm_normal_base = {
3961 &arm_prologue_unwind,
3962 arm_normal_frame_base,
3963 arm_normal_frame_base,
3964 arm_normal_frame_base
3965 };
3966
3967 static struct value *
3968 arm_dwarf2_prev_register (frame_info_ptr this_frame, void **this_cache,
3969 int regnum)
3970 {
3971 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3972 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3973 CORE_ADDR lr;
3974 ULONGEST cpsr;
3975
3976 if (regnum == ARM_PC_REGNUM)
3977 {
3978 /* The PC is normally copied from the return column, which
3979 describes saves of LR. However, that version may have an
3980 extra bit set to indicate Thumb state. The bit is not
3981 part of the PC. */
3982
3983 /* Record in the frame whether the return address was signed. */
3984 if (tdep->have_pacbti)
3985 {
3986 CORE_ADDR ra_auth_code
3987 = frame_unwind_register_unsigned (this_frame,
3988 tdep->pacbti_pseudo_base);
3989
3990 if (ra_auth_code != 0)
3991 set_frame_previous_pc_masked (this_frame);
3992 }
3993
3994 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3995 return frame_unwind_got_constant (this_frame, regnum,
3996 arm_addr_bits_remove (gdbarch, lr));
3997 }
3998 else if (regnum == ARM_PS_REGNUM)
3999 {
4000 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
4001 cpsr = get_frame_register_unsigned (this_frame, regnum);
4002 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4003 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
4004 return frame_unwind_got_constant (this_frame, regnum, cpsr);
4005 }
4006 else if (arm_is_alternative_sp_register (tdep, regnum))
4007 {
4008 /* Handle the alternative SP registers on Cortex-M. */
4009 bool override_with_sp_value = false;
4010 CORE_ADDR val;
4011
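/* If REGNUM names the stack pointer that is currently active (its value
aliases SP), then the previous frame's value for it is the unwound SP
rather than the raw register content. */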
4012 if (tdep->have_sec_ext)
4013 {
4014 CORE_ADDR sp
4015 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4016 CORE_ADDR msp_s
4017 = get_frame_register_unsigned (this_frame,
4018 tdep->m_profile_msp_s_regnum);
4019 CORE_ADDR msp_ns
4020 = get_frame_register_unsigned (this_frame,
4021 tdep->m_profile_msp_ns_regnum);
4022 CORE_ADDR psp_s
4023 = get_frame_register_unsigned (this_frame,
4024 tdep->m_profile_psp_s_regnum);
4025 CORE_ADDR psp_ns
4026 = get_frame_register_unsigned (this_frame,
4027 tdep->m_profile_psp_ns_regnum);
4028
4029 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4030 && (msp_s == sp || msp_ns == sp);
4031 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum)
4032 && (msp_s == sp);
4033 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum)
4034 && (msp_ns == sp);
4035 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4036 && (psp_s == sp || psp_ns == sp);
4037 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum)
4038 && (psp_s == sp);
4039 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum)
4040 && (psp_ns == sp);
4041
4042 override_with_sp_value = is_msp || is_msp_s || is_msp_ns
4043 || is_psp || is_psp_s || is_psp_ns;
4044
4045 }
4046 else if (tdep->is_m)
4047 {
4048 CORE_ADDR sp
4049 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4050 CORE_ADDR msp
4051 = get_frame_register_unsigned (this_frame,
4052 tdep->m_profile_msp_regnum);
4053 CORE_ADDR psp
4054 = get_frame_register_unsigned (this_frame,
4055 tdep->m_profile_psp_regnum);
4056
4057 bool is_msp = (regnum == tdep->m_profile_msp_regnum) && (sp == msp);
4058 bool is_psp = (regnum == tdep->m_profile_psp_regnum) && (sp == psp);
4059
4060 override_with_sp_value = is_msp || is_psp;
4061 }
4062
4063 if (override_with_sp_value)
4064 {
4065 /* Use value of SP from previous frame. */
4066 frame_info_ptr prev_frame = get_prev_frame (this_frame);
4067 if (prev_frame)
4068 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM);
4069 else
4070 val = get_frame_base (this_frame);
4071 }
4072 else
4073 /* Use value for the register from previous frame. */
4074 val = get_frame_register_unsigned (this_frame, regnum);
4075
4076 return frame_unwind_got_constant (this_frame, regnum, val);
4077 }
4078
4079 internal_error (_("Unexpected register %d"), regnum);
4080 }
4081
4082 /* Implement the stack_frame_destroyed_p gdbarch method. */
4083
4084 static int
4085 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4086 {
4087 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4088 unsigned int insn, insn2;
4089 int found_return = 0, found_stack_adjust = 0;
4090 CORE_ADDR func_start, func_end;
4091 CORE_ADDR scan_pc;
4092 gdb_byte buf[4];
4093
4094 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4095 return 0;
4096
4097 /* The epilogue is a sequence of instructions along the following lines:
4098
4099 - add stack frame size to SP or FP
4100 - [if frame pointer used] restore SP from FP
4101 - restore registers from SP [may include PC]
4102 - a return-type instruction [if PC wasn't already restored]
4103
4104 In a first pass, we scan forward from the current PC and verify the
4105 instructions we find as compatible with this sequence, ending in a
4106 return instruction.
4107
4108 However, this is not sufficient to distinguish indirect function calls
4109 within a function from indirect tail calls in the epilogue in some cases.
4110 Therefore, if we didn't already find any SP-changing instruction during
4111 forward scan, we add a backward scanning heuristic to ensure we actually
4112 are in the epilogue. */
4113
4114 scan_pc = pc;
4115 while (scan_pc < func_end && !found_return)
4116 {
4117 if (target_read_memory (scan_pc, buf, 2))
4118 break;
4119
4120 scan_pc += 2;
4121 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4122
4123 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
4124 found_return = 1;
4125 else if (insn == 0x46f7) /* mov pc, lr */
4126 found_return = 1;
4127 else if (thumb_instruction_restores_sp (insn))
4128 {
4129 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4130 found_return = 1;
4131 }
4132 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4133 {
4134 if (target_read_memory (scan_pc, buf, 2))
4135 break;
4136
4137 scan_pc += 2;
4138 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
4139
4140 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4141 {
4142 if (insn2 & 0x8000) /* <registers> include PC. */
4143 found_return = 1;
4144 }
4145 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4146 && (insn2 & 0x0fff) == 0x0b04)
4147 {
4148 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
4149 found_return = 1;
4150 }
4151 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4152 && (insn2 & 0x0e00) == 0x0a00)
4153 ;
4154 else
4155 break;
4156 }
4157 else
4158 break;
4159 }
4160
4161 if (!found_return)
4162 return 0;
4163
4164 /* Since any instruction in the epilogue sequence, with the possible
4165 exception of return itself, updates the stack pointer, we need to
4166 scan backwards for at most one instruction. Try either a 16-bit or
4167 a 32-bit instruction. This is just a heuristic, so we do not worry
4168 too much about false positives. */
4169
4170 if (pc - 4 < func_start)
4171 return 0;
4172 if (target_read_memory (pc - 4, buf, 4))
4173 return 0;
4174
4175 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4176 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
4177
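/* INSN and INSN2 are the two halfwords at PC - 4; together they are either
one 32-bit instruction or two consecutive 16-bit instructions, and the
checks below cover both cases. */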
4178 if (thumb_instruction_restores_sp (insn2))
4179 found_stack_adjust = 1;
4180 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4181 found_stack_adjust = 1;
4182 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4183 && (insn2 & 0x0fff) == 0x0b04)
4184 found_stack_adjust = 1;
4185 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4186 && (insn2 & 0x0e00) == 0x0a00)
4187 found_stack_adjust = 1;
4188
4189 return found_stack_adjust;
4190 }
4191
4192 static int
4193 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4194 {
4195 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4196 unsigned int insn;
4197 int found_return;
4198 CORE_ADDR func_start, func_end;
4199
4200 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4201 return 0;
4202
4203 /* We are in the epilogue if the previous instruction was a stack
4204 adjustment and the next instruction is a possible return (bx, mov
4205 pc, or pop). We could have to scan backwards to find the stack
4206 adjustment, or forwards to find the return, but this is a decent
4207 approximation. First scan forwards. */
4208
4209 found_return = 0;
4210 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
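/* A condition field of 0xf (NV) selects the unconditional instruction
space, which cannot encode any of the return forms checked below. */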
4211 if (bits (insn, 28, 31) != INST_NV)
4212 {
4213 if ((insn & 0x0ffffff0) == 0x012fff10)
4214 /* BX. */
4215 found_return = 1;
4216 else if ((insn & 0x0ffffff0) == 0x01a0f000)
4217 /* MOV PC. */
4218 found_return = 1;
4219 else if ((insn & 0x0fff0000) == 0x08bd0000
4220 && (insn & 0x0000c000) != 0)
4221 /* POP (LDMIA), including PC or LR. */
4222 found_return = 1;
4223 }
4224
4225 if (!found_return)
4226 return 0;
4227
4228 /* Scan backwards. This is just a heuristic, so do not worry about
4229 false positives from mode changes. */
4230
4231 if (pc < func_start + 4)
4232 return 0;
4233
4234 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
4235 if (arm_instruction_restores_sp (insn))
4236 return 1;
4237
4238 return 0;
4239 }
4240
4241 /* Implement the stack_frame_destroyed_p gdbarch method. */
4242
4243 static int
4244 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4245 {
4246 if (arm_pc_is_thumb (gdbarch, pc))
4247 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4248 else
4249 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
4250 }
4251
4252 /* When arguments must be pushed onto the stack, they go on in reverse
4253 order. The code below implements a FILO (stack) to do this. */
4254
4255 struct arm_stack_item
4256 {
4257 int len;
4258 struct arm_stack_item *prev;
4259 gdb_byte *data;
4260 };
4261
4262 static struct arm_stack_item *
4263 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4264 int len)
4265 {
4266 struct arm_stack_item *si;
4267 si = XNEW (struct arm_stack_item);
4268 si->data = (gdb_byte *) xmalloc (len);
4269 si->len = len;
4270 si->prev = prev;
4271 memcpy (si->data, contents, len);
4272 return si;
4273 }
4274
4275 static struct arm_stack_item *
4276 pop_stack_item (struct arm_stack_item *si)
4277 {
4278 struct arm_stack_item *dead = si;
4279 si = si->prev;
4280 xfree (dead->data);
4281 xfree (dead);
4282 return si;
4283 }
4284
4285 /* Implement the gdbarch type alignment method. This overrides the generic
4286 alignment algorithm for anything that is ARM specific. */
4287
4288 static ULONGEST
4289 arm_type_align (gdbarch *gdbarch, struct type *t)
4290 {
4291 t = check_typedef (t);
4292 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4293 {
4294 /* Use the natural alignment for vector types (the same as for the
4295 underlying scalar type), but cap the alignment at 64 bits (8 bytes). */
4296 if (t->length () > 8)
4297 return 8;
4298 else
4299 return t->length ();
4300 }
4301
4302 /* Allow the common code to calculate the alignment. */
4303 return 0;
4304 }
4305
4306 /* Possible base types for a candidate for passing and returning in
4307 VFP registers. */
4308
4309 enum arm_vfp_cprc_base_type
4310 {
4311 VFP_CPRC_UNKNOWN,
4312 VFP_CPRC_SINGLE,
4313 VFP_CPRC_DOUBLE,
4314 VFP_CPRC_VEC64,
4315 VFP_CPRC_VEC128
4316 };
4317
4318 /* The length of one element of base type B. */
4319
4320 static unsigned
4321 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4322 {
4323 switch (b)
4324 {
4325 case VFP_CPRC_SINGLE:
4326 return 4;
4327 case VFP_CPRC_DOUBLE:
4328 return 8;
4329 case VFP_CPRC_VEC64:
4330 return 8;
4331 case VFP_CPRC_VEC128:
4332 return 16;
4333 default:
4334 internal_error (_("Invalid VFP CPRC type: %d."),
4335 (int) b);
4336 }
4337 }
4338
4339 /* The character ('s', 'd' or 'q') for the type of VFP register used
4340 for passing base type B. */
4341
4342 static int
4343 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4344 {
4345 switch (b)
4346 {
4347 case VFP_CPRC_SINGLE:
4348 return 's';
4349 case VFP_CPRC_DOUBLE:
4350 return 'd';
4351 case VFP_CPRC_VEC64:
4352 return 'd';
4353 case VFP_CPRC_VEC128:
4354 return 'q';
4355 default:
4356 internal_error (_("Invalid VFP CPRC type: %d."),
4357 (int) b);
4358 }
4359 }
4360
4361 /* Determine whether T may be part of a candidate for passing and
4362 returning in VFP registers, ignoring the limit on the total number
4363 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4364 classification of the first valid component found; if it is not
4365 VFP_CPRC_UNKNOWN, all components must have the same classification
4366 as *BASE_TYPE. If it is found that T contains a type not permitted
4367 for passing and returning in VFP registers, a type differently
4368 classified from *BASE_TYPE, or two types differently classified
4369 from each other, return -1, otherwise return the total number of
4370 base-type elements found (possibly 0 in an empty structure or
4371 array). Vector types are not currently supported, matching the
4372 generic AAPCS support. */
4373
4374 static int
4375 arm_vfp_cprc_sub_candidate (struct type *t,
4376 enum arm_vfp_cprc_base_type *base_type)
4377 {
4378 t = check_typedef (t);
4379 switch (t->code ())
4380 {
4381 case TYPE_CODE_FLT:
4382 switch (t->length ())
4383 {
4384 case 4:
4385 if (*base_type == VFP_CPRC_UNKNOWN)
4386 *base_type = VFP_CPRC_SINGLE;
4387 else if (*base_type != VFP_CPRC_SINGLE)
4388 return -1;
4389 return 1;
4390
4391 case 8:
4392 if (*base_type == VFP_CPRC_UNKNOWN)
4393 *base_type = VFP_CPRC_DOUBLE;
4394 else if (*base_type != VFP_CPRC_DOUBLE)
4395 return -1;
4396 return 1;
4397
4398 default:
4399 return -1;
4400 }
4401 break;
4402
4403 case TYPE_CODE_COMPLEX:
4404 /* Arguments of complex T where T is one of the types float or
4405 double get treated as if they are implemented as:
4406
4407 struct complexT
4408 {
4409 T real;
4410 T imag;
4411 };
4412
4413 */
4414 switch (t->length ())
4415 {
4416 case 8:
4417 if (*base_type == VFP_CPRC_UNKNOWN)
4418 *base_type = VFP_CPRC_SINGLE;
4419 else if (*base_type != VFP_CPRC_SINGLE)
4420 return -1;
4421 return 2;
4422
4423 case 16:
4424 if (*base_type == VFP_CPRC_UNKNOWN)
4425 *base_type = VFP_CPRC_DOUBLE;
4426 else if (*base_type != VFP_CPRC_DOUBLE)
4427 return -1;
4428 return 2;
4429
4430 default:
4431 return -1;
4432 }
4433 break;
4434
4435 case TYPE_CODE_ARRAY:
4436 {
4437 if (t->is_vector ())
4438 {
4439 /* A 64-bit or 128-bit containerized vector type is a VFP
4440 CPRC. */
4441 switch (t->length ())
4442 {
4443 case 8:
4444 if (*base_type == VFP_CPRC_UNKNOWN)
4445 *base_type = VFP_CPRC_VEC64;
4446 return 1;
4447 case 16:
4448 if (*base_type == VFP_CPRC_UNKNOWN)
4449 *base_type = VFP_CPRC_VEC128;
4450 return 1;
4451 default:
4452 return -1;
4453 }
4454 }
4455 else
4456 {
4457 int count;
4458 unsigned unitlen;
4459
4460 count = arm_vfp_cprc_sub_candidate (t->target_type (),
4461 base_type);
4462 if (count == -1)
4463 return -1;
4464 if (t->length () == 0)
4465 {
4466 gdb_assert (count == 0);
4467 return 0;
4468 }
4469 else if (count == 0)
4470 return -1;
4471 unitlen = arm_vfp_cprc_unit_length (*base_type);
4472 gdb_assert ((t->length () % unitlen) == 0);
4473 return t->length () / unitlen;
4474 }
4475 }
4476 break;
4477
4478 case TYPE_CODE_STRUCT:
4479 {
4480 int count = 0;
4481 unsigned unitlen;
4482 int i;
4483 for (i = 0; i < t->num_fields (); i++)
4484 {
4485 int sub_count = 0;
4486
4487 if (!field_is_static (&t->field (i)))
4488 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4489 base_type);
4490 if (sub_count == -1)
4491 return -1;
4492 count += sub_count;
4493 }
4494 if (t->length () == 0)
4495 {
4496 gdb_assert (count == 0);
4497 return 0;
4498 }
4499 else if (count == 0)
4500 return -1;
4501 unitlen = arm_vfp_cprc_unit_length (*base_type);
4502 if (t->length () != unitlen * count)
4503 return -1;
4504 return count;
4505 }
4506
4507 case TYPE_CODE_UNION:
4508 {
4509 int count = 0;
4510 unsigned unitlen;
4511 int i;
4512 for (i = 0; i < t->num_fields (); i++)
4513 {
4514 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4515 base_type);
4516 if (sub_count == -1)
4517 return -1;
4518 count = (count > sub_count ? count : sub_count);
4519 }
4520 if (t->length () == 0)
4521 {
4522 gdb_assert (count == 0);
4523 return 0;
4524 }
4525 else if (count == 0)
4526 return -1;
4527 unitlen = arm_vfp_cprc_unit_length (*base_type);
4528 if (t->length () != unitlen * count)
4529 return -1;
4530 return count;
4531 }
4532
4533 default:
4534 break;
4535 }
4536
4537 return -1;
4538 }
4539
4540 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4541 if passed to or returned from a non-variadic function with the VFP
4542 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4543 *BASE_TYPE to the base type for T and *COUNT to the number of
4544 elements of that base type before returning. */
4545
4546 static int
4547 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4548 int *count)
4549 {
4550 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4551 int c = arm_vfp_cprc_sub_candidate (t, &b);
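/* The AAPCS permits at most four base-type elements in a VFP CPRC
(homogeneous aggregate). */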
4552 if (c <= 0 || c > 4)
4553 return 0;
4554 *base_type = b;
4555 *count = c;
4556 return 1;
4557 }
4558
4559 /* Return 1 if the VFP ABI should be used for passing arguments to and
4560 returning values from a function of type FUNC_TYPE, 0
4561 otherwise. */
4562
4563 static int
4564 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4565 {
4566 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4567
4568 /* Variadic functions always use the base ABI. Assume that functions
4569 without debug info are not variadic. */
4570 if (func_type && check_typedef (func_type)->has_varargs ())
4571 return 0;
4572
4573 /* The VFP ABI is only supported as a variant of AAPCS. */
4574 if (tdep->arm_abi != ARM_ABI_AAPCS)
4575 return 0;
4576
4577 return tdep->fp_model == ARM_FLOAT_VFP;
4578 }
4579
4580 /* We currently only support passing parameters in integer registers, which
4581 conforms with GCC's default model, and VFP argument passing following
4582 the VFP variant of AAPCS. Several other variants exist and
4583 we should probably support some of them based on the selected ABI. */
4584
4585 static CORE_ADDR
4586 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4587 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4588 struct value **args, CORE_ADDR sp,
4589 function_call_return_method return_method,
4590 CORE_ADDR struct_addr)
4591 {
4592 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4593 int argnum;
4594 int argreg;
4595 int nstack;
4596 struct arm_stack_item *si = NULL;
4597 int use_vfp_abi;
4598 struct type *ftype;
4599 unsigned vfp_regs_free = (1 << 16) - 1;
4600 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4601
4602 /* Determine the type of this function and whether the VFP ABI
4603 applies. */
4604 ftype = check_typedef (value_type (function));
4605 if (ftype->code () == TYPE_CODE_PTR)
4606 ftype = check_typedef (ftype->target_type ());
4607 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4608
4609 /* Set the return address. For the ARM, the return breakpoint is
4610 always at BP_ADDR. */
4611 if (arm_pc_is_thumb (gdbarch, bp_addr))
4612 bp_addr |= 1;
4613 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4614
4615 /* Walk through the list of args and determine how large a temporary
4616 stack is required. Need to take care here as structs may be
4617 passed on the stack, and we have to push them. */
4618 nstack = 0;
4619
4620 argreg = ARM_A1_REGNUM;
4621 nstack = 0;
4622
4623 /* The struct_return pointer occupies the first parameter
4624 passing register. */
4625 if (return_method == return_method_struct)
4626 {
4627 arm_debug_printf ("struct return in %s = %s",
4628 gdbarch_register_name (gdbarch, argreg),
4629 paddress (gdbarch, struct_addr));
4630
4631 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4632 argreg++;
4633 }
4634
4635 for (argnum = 0; argnum < nargs; argnum++)
4636 {
4637 int len;
4638 struct type *arg_type;
4639 struct type *target_type;
4640 enum type_code typecode;
4641 const bfd_byte *val;
4642 int align;
4643 enum arm_vfp_cprc_base_type vfp_base_type;
4644 int vfp_base_count;
4645 int may_use_core_reg = 1;
4646
4647 arg_type = check_typedef (value_type (args[argnum]));
4648 len = arg_type->length ();
4649 target_type = arg_type->target_type ();
4650 typecode = arg_type->code ();
4651 val = value_contents (args[argnum]).data ();
4652
4653 align = type_align (arg_type);
4654 /* Round alignment up to a whole number of words. */
4655 align = (align + ARM_INT_REGISTER_SIZE - 1)
4656 & ~(ARM_INT_REGISTER_SIZE - 1);
4657 /* Different ABIs have different maximum alignments. */
4658 if (tdep->arm_abi == ARM_ABI_APCS)
4659 {
4660 /* The APCS ABI only requires word alignment. */
4661 align = ARM_INT_REGISTER_SIZE;
4662 }
4663 else
4664 {
4665 /* The AAPCS requires at most doubleword alignment. */
4666 if (align > ARM_INT_REGISTER_SIZE * 2)
4667 align = ARM_INT_REGISTER_SIZE * 2;
4668 }
4669
4670 if (use_vfp_abi
4671 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4672 &vfp_base_count))
4673 {
4674 int regno;
4675 int unit_length;
4676 int shift;
4677 unsigned mask;
4678
4679 /* Because this is a CPRC it cannot go in a core register or
4680 cause a core register to be skipped for alignment.
4681 Either it goes in VFP registers and the rest of this loop
4682 iteration is skipped for this argument, or it goes on the
4683 stack (and the stack alignment code is correct for this
4684 case). */
4685 may_use_core_reg = 0;
4686
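/* VFP_REGS_FREE is a bit mask over the sixteen single-precision argument
registers S0-S15 (aliasing D0-D7 and Q0-Q3); SHIFT is the size of one
base-type element in S-register units. */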
4687 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4688 shift = unit_length / 4;
4689 mask = (1 << (shift * vfp_base_count)) - 1;
4690 for (regno = 0; regno < 16; regno += shift)
4691 if (((vfp_regs_free >> regno) & mask) == mask)
4692 break;
4693
4694 if (regno < 16)
4695 {
4696 int reg_char;
4697 int reg_scaled;
4698 int i;
4699
4700 vfp_regs_free &= ~(mask << regno);
4701 reg_scaled = regno / shift;
4702 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4703 for (i = 0; i < vfp_base_count; i++)
4704 {
4705 char name_buf[4];
4706 int regnum;
4707 if (reg_char == 'q')
4708 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4709 val + i * unit_length);
4710 else
4711 {
4712 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4713 reg_char, reg_scaled + i);
4714 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4715 strlen (name_buf));
4716 regcache->cooked_write (regnum, val + i * unit_length);
4717 }
4718 }
4719 continue;
4720 }
4721 else
4722 {
4723 /* This CPRC could not go in VFP registers, so all VFP
4724 registers are now marked as used. */
4725 vfp_regs_free = 0;
4726 }
4727 }
4728
4729 /* Push stack padding for doubleword alignment. */
4730 if (nstack & (align - 1))
4731 {
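/* The padding contents are irrelevant; VAL is merely a convenient source
of ARM_INT_REGISTER_SIZE bytes. */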
4732 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4733 nstack += ARM_INT_REGISTER_SIZE;
4734 }
4735
4736 /* Doubleword aligned quantities must go in even register pairs. */
4737 if (may_use_core_reg
4738 && argreg <= ARM_LAST_ARG_REGNUM
4739 && align > ARM_INT_REGISTER_SIZE
4740 && argreg & 1)
4741 argreg++;
4742
4743 /* If the argument is a pointer to a function, and it is a
4744 Thumb function, create a LOCAL copy of the value and set
4745 the THUMB bit in it. */
4746 if (TYPE_CODE_PTR == typecode
4747 && target_type != NULL
4748 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4749 {
4750 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4751 if (arm_pc_is_thumb (gdbarch, regval))
4752 {
4753 bfd_byte *copy = (bfd_byte *) alloca (len);
4754 store_unsigned_integer (copy, len, byte_order,
4755 MAKE_THUMB_ADDR (regval));
4756 val = copy;
4757 }
4758 }
4759
4760 /* Copy the argument to general registers or the stack in
4761 register-sized pieces. Large arguments are split between
4762 registers and stack. */
4763 while (len > 0)
4764 {
4765 int partial_len = len < ARM_INT_REGISTER_SIZE
4766 ? len : ARM_INT_REGISTER_SIZE;
4767 CORE_ADDR regval
4768 = extract_unsigned_integer (val, partial_len, byte_order);
4769
4770 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4771 {
4772 /* The argument is being passed in a general purpose
4773 register. */
4774 if (byte_order == BFD_ENDIAN_BIG)
4775 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4776
4777 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4778 gdbarch_register_name (gdbarch, argreg),
4779 phex (regval, ARM_INT_REGISTER_SIZE));
4780
4781 regcache_cooked_write_unsigned (regcache, argreg, regval);
4782 argreg++;
4783 }
4784 else
4785 {
4786 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4787
4788 memset (buf, 0, sizeof (buf));
4789 store_unsigned_integer (buf, partial_len, byte_order, regval);
4790
4791 /* Push the arguments onto the stack. */
4792 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4793 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4794 nstack += ARM_INT_REGISTER_SIZE;
4795 }
4796
4797 len -= partial_len;
4798 val += partial_len;
4799 }
4800 }
4801 /* If we have an odd number of words to push, then decrement the stack
4802 by one word now, so that the first stack argument will be dword aligned. */
4803 if (nstack & 4)
4804 sp -= 4;
4805
4806 while (si)
4807 {
4808 sp -= si->len;
4809 write_memory (sp, si->data, si->len);
4810 si = pop_stack_item (si);
4811 }
4812
4813 /* Finally, update the SP register. */
4814 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4815
4816 return sp;
4817 }
4818
4819
4820 /* Always align the frame to an 8-byte boundary. This is required on
4821 some platforms and harmless on the rest. */
4822
4823 static CORE_ADDR
4824 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4825 {
4826 /* Align the stack to eight bytes. */
4827 return sp & ~ (CORE_ADDR) 7;
4828 }
4829
4830 static void
4831 print_fpu_flags (struct ui_file *file, int flags)
4832 {
4833 if (flags & (1 << 0))
4834 gdb_puts ("IVO ", file);
4835 if (flags & (1 << 1))
4836 gdb_puts ("DVZ ", file);
4837 if (flags & (1 << 2))
4838 gdb_puts ("OFL ", file);
4839 if (flags & (1 << 3))
4840 gdb_puts ("UFL ", file);
4841 if (flags & (1 << 4))
4842 gdb_puts ("INX ", file);
4843 gdb_putc ('\n', file);
4844 }
4845
4846 /* Print interesting information about the floating point processor
4847 (if present) or emulator. */
4848 static void
4849 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4850 frame_info_ptr frame, const char *args)
4851 {
4852 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4853 int type;
4854
4855 type = (status >> 24) & 127;
4856 if (status & (1 << 31))
4857 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4858 else
4859 gdb_printf (file, _("Software FPU type %d\n"), type);
4860 /* i18n: [floating point unit] mask */
4861 gdb_puts (_("mask: "), file);
4862 print_fpu_flags (file, status >> 16);
4863 /* i18n: [floating point unit] flags */
4864 gdb_puts (_("flags: "), file);
4865 print_fpu_flags (file, status);
4866 }
4867
4868 /* Construct the ARM extended floating point type. */
4869 static struct type *
4870 arm_ext_type (struct gdbarch *gdbarch)
4871 {
4872 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4873
4874 if (!tdep->arm_ext_type)
4875 tdep->arm_ext_type
4876 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4877 floatformats_arm_ext);
4878
4879 return tdep->arm_ext_type;
4880 }
4881
4882 static struct type *
4883 arm_neon_double_type (struct gdbarch *gdbarch)
4884 {
4885 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4886
4887 if (tdep->neon_double_type == NULL)
4888 {
4889 struct type *t, *elem;
4890
4891 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4892 TYPE_CODE_UNION);
4893 elem = builtin_type (gdbarch)->builtin_uint8;
4894 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4895 elem = builtin_type (gdbarch)->builtin_uint16;
4896 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4897 elem = builtin_type (gdbarch)->builtin_uint32;
4898 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4899 elem = builtin_type (gdbarch)->builtin_uint64;
4900 append_composite_type_field (t, "u64", elem);
4901 elem = builtin_type (gdbarch)->builtin_float;
4902 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4903 elem = builtin_type (gdbarch)->builtin_double;
4904 append_composite_type_field (t, "f64", elem);
4905
4906 t->set_is_vector (true);
4907 t->set_name ("neon_d");
4908 tdep->neon_double_type = t;
4909 }
4910
4911 return tdep->neon_double_type;
4912 }
4913
4914 /* FIXME: The vector types are not correctly ordered on big-endian
4915 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4916 bits of d0 - regardless of what unit size is being held in d0. So
4917 the offset of the first uint8 in d0 is 7, but the offset of the
4918 first float is 4. This code works as-is for little-endian
4919 targets. */
4920
4921 static struct type *
4922 arm_neon_quad_type (struct gdbarch *gdbarch)
4923 {
4924 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4925
4926 if (tdep->neon_quad_type == NULL)
4927 {
4928 struct type *t, *elem;
4929
4930 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4931 TYPE_CODE_UNION);
4932 elem = builtin_type (gdbarch)->builtin_uint8;
4933 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4934 elem = builtin_type (gdbarch)->builtin_uint16;
4935 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4936 elem = builtin_type (gdbarch)->builtin_uint32;
4937 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4938 elem = builtin_type (gdbarch)->builtin_uint64;
4939 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4940 elem = builtin_type (gdbarch)->builtin_float;
4941 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4942 elem = builtin_type (gdbarch)->builtin_double;
4943 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4944
4945 t->set_is_vector (true);
4946 t->set_name ("neon_q");
4947 tdep->neon_quad_type = t;
4948 }
4949
4950 return tdep->neon_quad_type;
4951 }
4952
4953 /* Return true if REGNUM is a Q pseudo register. Return false
4954 otherwise.
4955
4956 REGNUM is the raw register number and not a pseudo-relative register
4957 number. */
4958
4959 static bool
4960 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4961 {
4962 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4963
4964 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4965 MVE (Q0~Q7) features. */
4966 if (tdep->have_q_pseudos
4967 && regnum >= tdep->q_pseudo_base
4968 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4969 return true;
4970
4971 return false;
4972 }
4973
4974 /* Return true if REGNUM is a VFP S pseudo register. Return false
4975 otherwise.
4976
4977 REGNUM is the raw register number and not a pseudo-relative register
4978 number. */
4979
4980 static bool
4981 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4982 {
4983 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4984
4985 if (tdep->have_s_pseudos
4986 && regnum >= tdep->s_pseudo_base
4987 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4988 return true;
4989
4990 return false;
4991 }
4992
4993 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4994 otherwise.
4995
4996 REGNUM is the raw register number and not a pseudo-relative register
4997 number. */
4998
4999 static bool
5000 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
5001 {
5002 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5003
5004 if (tdep->have_mve
5005 && regnum >= tdep->mve_pseudo_base
5006 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
5007 return true;
5008
5009 return false;
5010 }
5011
5012 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
5013 false otherwise.
5014
5015 REGNUM is the raw register number and not a pseudo-relative register
5016 number. */
5017
5018 static bool
5019 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
5020 {
5021 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5022
5023 if (tdep->have_pacbti
5024 && regnum >= tdep->pacbti_pseudo_base
5025 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
5026 return true;
5027
5028 return false;
5029 }
5030
5031 /* Return the GDB type object for the "standard" data type of data in
5032 register N. */
5033
5034 static struct type *
5035 arm_register_type (struct gdbarch *gdbarch, int regnum)
5036 {
5037 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5038
5039 if (is_s_pseudo (gdbarch, regnum))
5040 return builtin_type (gdbarch)->builtin_float;
5041
5042 if (is_q_pseudo (gdbarch, regnum))
5043 return arm_neon_quad_type (gdbarch);
5044
5045 if (is_mve_pseudo (gdbarch, regnum))
5046 return builtin_type (gdbarch)->builtin_int16;
5047
5048 if (is_pacbti_pseudo (gdbarch, regnum))
5049 return builtin_type (gdbarch)->builtin_uint32;
5050
5051 /* If the target description has register information, we are only
5052 in this function so that we can override the types of
5053 double-precision registers for NEON. */
5054 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
5055 {
5056 struct type *t = tdesc_register_type (gdbarch, regnum);
5057
5058 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
5059 && t->code () == TYPE_CODE_FLT
5060 && tdep->have_neon)
5061 return arm_neon_double_type (gdbarch);
5062 else
5063 return t;
5064 }
5065
5066 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
5067 {
5068 if (!tdep->have_fpa_registers)
5069 return builtin_type (gdbarch)->builtin_void;
5070
5071 return arm_ext_type (gdbarch);
5072 }
5073 else if (regnum == ARM_SP_REGNUM)
5074 return builtin_type (gdbarch)->builtin_data_ptr;
5075 else if (regnum == ARM_PC_REGNUM)
5076 return builtin_type (gdbarch)->builtin_func_ptr;
5077 else if (regnum >= ARRAY_SIZE (arm_register_names))
5078 /* These registers are only supported on targets which supply
5079 an XML description. */
5080 return builtin_type (gdbarch)->builtin_int0;
5081 else
5082 return builtin_type (gdbarch)->builtin_uint32;
5083 }
5084
5085 /* Map a DWARF register REGNUM onto the appropriate GDB register
5086 number. */
5087
5088 static int
5089 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
5090 {
5091 /* Core integer regs. */
5092 if (reg >= 0 && reg <= 15)
5093 return reg;
5094
5095 /* Legacy FPA encoding. These were once used in a way which
5096 overlapped with VFP register numbering, so their use is
5097 discouraged, but GDB doesn't support the ARM toolchain
5098 which used them for VFP. */
5099 if (reg >= 16 && reg <= 23)
5100 return ARM_F0_REGNUM + reg - 16;
5101
5102 /* New assignments for the FPA registers. */
5103 if (reg >= 96 && reg <= 103)
5104 return ARM_F0_REGNUM + reg - 96;
5105
5106 /* WMMX register assignments. */
5107 if (reg >= 104 && reg <= 111)
5108 return ARM_WCGR0_REGNUM + reg - 104;
5109
5110 if (reg >= 112 && reg <= 127)
5111 return ARM_WR0_REGNUM + reg - 112;
5112
5113 /* PACBTI register containing the Pointer Authentication Code. */
5114 if (reg == ARM_DWARF_RA_AUTH_CODE)
5115 {
5116 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5117
5118 if (tdep->have_pacbti)
5119 return tdep->pacbti_pseudo_base;
5120
5121 return -1;
5122 }
5123
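/* iWMMXt control registers wC0..wC7. */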
5124 if (reg >= 192 && reg <= 199)
5125 return ARM_WC0_REGNUM + reg - 192;
5126
5127 /* VFP v2 registers. A double precision value is actually
5128 in d1 rather than s2, but the ABI only defines numbering
5129 for the single precision registers. This will "just work"
5130 in GDB for little endian targets (we'll read eight bytes,
5131 starting in s0 and then progressing to s1), but will be
5132 reversed on big endian targets with VFP. This won't
5133 be a problem for the new Neon quad registers; you're supposed
5134 to use DW_OP_piece for those. */
5135 if (reg >= 64 && reg <= 95)
5136 {
5137 char name_buf[4];
5138
5139 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
5140 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5141 strlen (name_buf));
5142 }
5143
5144 /* VFP v3 / Neon registers. This range is also used for VFP v2
5145 registers, except that it now describes d0 instead of s0. */
5146 if (reg >= 256 && reg <= 287)
5147 {
5148 char name_buf[4];
5149
5150 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
5151 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5152 strlen (name_buf));
5153 }
5154
5155 return -1;
5156 }
5157
5158 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
5159 static int
5160 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
5161 {
5162 int reg = regnum;
5163 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
5164
5165 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
5166 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
5167
5168 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
5169 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
5170
5171 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
5172 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
5173
5174 if (reg < NUM_GREGS)
5175 return SIM_ARM_R0_REGNUM + reg;
5176 reg -= NUM_GREGS;
5177
5178 if (reg < NUM_FREGS)
5179 return SIM_ARM_FP0_REGNUM + reg;
5180 reg -= NUM_FREGS;
5181
5182 if (reg < NUM_SREGS)
5183 return SIM_ARM_FPS_REGNUM + reg;
5184 reg -= NUM_SREGS;
5185
5186 internal_error (_("Bad REGNUM %d"), regnum);
5187 }
5188
5189 static const unsigned char op_lit0 = DW_OP_lit0;
5190
5191 static void
5192 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
5193 struct dwarf2_frame_state_reg *reg,
5194 frame_info_ptr this_frame)
5195 {
5196 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5197
5198 if (is_pacbti_pseudo (gdbarch, regnum))
5199 {
5200 /* Initialize RA_AUTH_CODE to zero. */
5201 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
5202 reg->loc.exp.start = &op_lit0;
5203 reg->loc.exp.len = 1;
5204 return;
5205 }
5206
5207 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM)
5208 {
5209 reg->how = DWARF2_FRAME_REG_FN;
5210 reg->loc.fn = arm_dwarf2_prev_register;
5211 }
5212 else if (regnum == ARM_SP_REGNUM)
5213 reg->how = DWARF2_FRAME_REG_CFA;
5214 else if (arm_is_alternative_sp_register (tdep, regnum))
5215 {
5216 /* Handle the alternative SP registers on Cortex-M. */
5217 reg->how = DWARF2_FRAME_REG_FN;
5218 reg->loc.fn = arm_dwarf2_prev_register;
5219 }
5220 }
5221
5222 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5223 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5224 NULL if an error occurs. BUF is freed. */
5225
5226 static gdb_byte *
5227 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5228 int old_len, int new_len)
5229 {
5230 gdb_byte *new_buf;
5231 int bytes_to_read = new_len - old_len;
5232
5233 new_buf = (gdb_byte *) xmalloc (new_len);
5234 memcpy (new_buf + bytes_to_read, buf, old_len);
5235 xfree (buf);
5236 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
5237 {
5238 xfree (new_buf);
5239 return NULL;
5240 }
5241 return new_buf;
5242 }
5243
5244 /* An IT block is at most the 2-byte IT instruction followed by
5245 four 4-byte instructions. The furthest back we must search to
5246 find an IT block that affects the current instruction is thus
5247 2 + 3 * 4 == 14 bytes. */
5248 #define MAX_IT_BLOCK_PREFIX 14
5249
5250 /* Use a quick scan if there are more than this many bytes of
5251 code. */
5252 #define IT_SCAN_THRESHOLD 32
5253
5254 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5255 A breakpoint in an IT block may not be hit, depending on the
5256 condition flags. */
5257 static CORE_ADDR
5258 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5259 {
5260 gdb_byte *buf;
5261 char map_type;
5262 CORE_ADDR boundary, func_start;
5263 int buf_len;
5264 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5265 int i, any, last_it, last_it_count;
5266 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5267
5268 /* If we are using BKPT breakpoints, none of this is necessary. */
5269 if (tdep->thumb2_breakpoint == NULL)
5270 return bpaddr;
5271
5272 /* ARM mode does not have this problem. */
5273 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5274 return bpaddr;
5275
5276 /* We are setting a breakpoint in Thumb code that could potentially
5277 contain an IT block. The first step is to find how much Thumb
5278 code there is; we do not need to read outside of known Thumb
5279 sequences. */
5280 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5281 if (map_type == 0)
5282 /* Thumb-2 code must have mapping symbols to have a chance. */
5283 return bpaddr;
5284
5285 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5286
5287 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5288 && func_start > boundary)
5289 boundary = func_start;
5290
5291 /* Search for a candidate IT instruction. We have to do some fancy
5292 footwork to distinguish a real IT instruction from the second
5293 half of a 32-bit instruction, but there is no need for that if
5294 there's no candidate. */
5295 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5296 if (buf_len == 0)
5297 /* No room for an IT instruction. */
5298 return bpaddr;
5299
5300 buf = (gdb_byte *) xmalloc (buf_len);
5301 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5302 return bpaddr;
5303 any = 0;
5304 for (i = 0; i < buf_len; i += 2)
5305 {
5306 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
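/* An IT instruction is encoded as 0xbfXY with a non-zero low nibble (the
mask); a zero mask field would instead encode a hint such as NOP or
YIELD. */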
5307 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5308 {
5309 any = 1;
5310 break;
5311 }
5312 }
5313
5314 if (any == 0)
5315 {
5316 xfree (buf);
5317 return bpaddr;
5318 }
5319
5320 /* OK, the code bytes before this instruction contain at least one
5321 halfword which resembles an IT instruction. We know that it's
5322 Thumb code, but there are still two possibilities. Either the
5323 halfword really is an IT instruction, or it is the second half of
5324 a 32-bit Thumb instruction. The only way we can tell is to
5325 scan forwards from a known instruction boundary. */
5326 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5327 {
5328 int definite;
5329
5330 /* There's a lot of code before this instruction. Start with an
5331 optimistic search; it's easy to recognize halfwords that can
5332 not be the start of a 32-bit instruction, and use that to
5333 lock on to the instruction boundaries. */
5334 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5335 if (buf == NULL)
5336 return bpaddr;
5337 buf_len = IT_SCAN_THRESHOLD;
5338
5339 definite = 0;
5340 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5341 {
5342 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5343 if (thumb_insn_size (inst1) == 2)
5344 {
5345 definite = 1;
5346 break;
5347 }
5348 }
5349
5350 /* At this point, if DEFINITE, BUF[I] is the first place we
5351 are sure that we know the instruction boundaries, and it is far
5352 enough from BPADDR that we could not miss an IT instruction
5353 affecting BPADDR. If ! DEFINITE, give up - start from a
5354 known boundary. */
5355 if (! definite)
5356 {
5357 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5358 bpaddr - boundary);
5359 if (buf == NULL)
5360 return bpaddr;
5361 buf_len = bpaddr - boundary;
5362 i = 0;
5363 }
5364 }
5365 else
5366 {
5367 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5368 if (buf == NULL)
5369 return bpaddr;
5370 buf_len = bpaddr - boundary;
5371 i = 0;
5372 }
5373
5374 /* Scan forwards. Find the last IT instruction before BPADDR. */
5375 last_it = -1;
5376 last_it_count = 0;
5377 while (i < buf_len)
5378 {
5379 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5380 last_it_count--;
5381 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5382 {
5383 last_it = i;
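	  /* The IT instruction's mask field (bits 0-3) gives the block
	     length: the position of the least-significant set bit selects
	     four (bit 0), three (bit 1), two (bit 2) or one (bit 3)
	     covered instructions.  */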
5384 if (inst1 & 0x0001)
5385 last_it_count = 4;
5386 else if (inst1 & 0x0002)
5387 last_it_count = 3;
5388 else if (inst1 & 0x0004)
5389 last_it_count = 2;
5390 else
5391 last_it_count = 1;
5392 }
5393 i += thumb_insn_size (inst1);
5394 }
5395
5396 xfree (buf);
5397
5398 if (last_it == -1)
5399 /* There wasn't really an IT instruction after all. */
5400 return bpaddr;
5401
5402 if (last_it_count < 1)
5403 /* It was too far away. */
5404 return bpaddr;
5405
5406 /* This really is a trouble spot. Move the breakpoint to the IT
5407 instruction. */
5408 return bpaddr - buf_len + last_it;
5409 }
5410
5411 /* ARM displaced stepping support.
5412
5413 Generally ARM displaced stepping works as follows:
5414
5415 1. When an instruction is to be single-stepped, it is first decoded by
5416 arm_process_displaced_insn. Depending on the type of instruction, it is
5417 then copied to a scratch location, possibly in a modified form. The
5418 copy_* set of functions performs such modification, as necessary. A
5419 breakpoint is placed after the modified instruction in the scratch space
5420 to return control to GDB. Note in particular that instructions which
5421 modify the PC will no longer do so after modification.
5422
5423 2. The instruction is single-stepped, by setting the PC to the scratch
5424 location address, and resuming. Control returns to GDB when the
5425 breakpoint is hit.
5426
5427 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5428 function used for the current instruction. This function's job is to
5429 put the CPU/memory state back to what it would have been if the
5430 instruction had been executed unmodified in its original location. */
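/* For instance, an ARM "add r2, pc, #4" cannot simply run from the scratch
   area, because there the PC reads as a scratch-area address.  The copy_*
   routine rewrites it to use scratch registers primed with the original PC
   value, and the cleanup_* routine moves the result into r2 and restores
   the scratch registers.  */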
5431
5432 /* NOP instruction (mov r0, r0). */
5433 #define ARM_NOP 0xe1a00000
5434 #define THUMB_NOP 0x4600
5435
5436 /* Helper for register reads for displaced stepping. In particular, this
5437 returns the PC as it would be seen by the instruction at its original
5438 location. */
5439
5440 ULONGEST
5441 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5442 int regno)
5443 {
5444 ULONGEST ret;
5445 CORE_ADDR from = dsc->insn_addr;
5446
5447 if (regno == ARM_PC_REGNUM)
5448 {
5449 /* Compute pipeline offset:
5450 - When executing an ARM instruction, PC reads as the address of the
5451 current instruction plus 8.
5452 - When executing a Thumb instruction, PC reads as the address of the
5453 current instruction plus 4. */
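      /* For example, an ARM instruction originally at 0x8000 that reads
	 the PC sees 0x8008, regardless of where the copy actually
	 executes.  */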
5454
5455 if (!dsc->is_thumb)
5456 from += 8;
5457 else
5458 from += 4;
5459
5460 displaced_debug_printf ("read pc value %.8lx",
5461 (unsigned long) from);
5462 return (ULONGEST) from;
5463 }
5464 else
5465 {
5466 regcache_cooked_read_unsigned (regs, regno, &ret);
5467
5468 displaced_debug_printf ("read r%d value %.8lx",
5469 regno, (unsigned long) ret);
5470
5471 return ret;
5472 }
5473 }
5474
5475 static int
5476 displaced_in_arm_mode (struct regcache *regs)
5477 {
5478 ULONGEST ps;
5479 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5480
5481 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5482
5483 return (ps & t_bit) == 0;
5484 }
5485
5486 /* Write to the PC as from a branch instruction. */
5487
5488 static void
5489 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5490 ULONGEST val)
5491 {
5492 if (!dsc->is_thumb)
5493 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5494 architecture versions < 6. */
5495 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5496 val & ~(ULONGEST) 0x3);
5497 else
5498 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5499 val & ~(ULONGEST) 0x1);
5500 }
5501
5502 /* Write to the PC as from a branch-exchange instruction. */
5503
5504 static void
5505 bx_write_pc (struct regcache *regs, ULONGEST val)
5506 {
5507 ULONGEST ps;
5508 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5509
5510 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5511
5512 if ((val & 1) == 1)
5513 {
5514 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5515 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5516 }
5517 else if ((val & 2) == 0)
5518 {
5519 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5520 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5521 }
5522 else
5523 {
5524 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5525 mode, align dest to 4 bytes). */
5526 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5527 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5528 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5529 }
5530 }
5531
5532 /* Write to the PC as if from a load instruction. */
5533
5534 static void
5535 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5536 ULONGEST val)
5537 {
5538 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5539 bx_write_pc (regs, val);
5540 else
5541 branch_write_pc (regs, dsc, val);
5542 }
5543
5544 /* Write to the PC as if from an ALU instruction. */
5545
5546 static void
5547 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5548 ULONGEST val)
5549 {
5550 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5551 bx_write_pc (regs, val);
5552 else
5553 branch_write_pc (regs, dsc, val);
5554 }
5555
5556 /* Helper for writing to registers for displaced stepping. Writing to the PC
5557    has varying effects depending on the instruction which does the write:
5558 this is controlled by the WRITE_PC argument. */
5559
5560 void
5561 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5562 int regno, ULONGEST val, enum pc_write_style write_pc)
5563 {
5564 if (regno == ARM_PC_REGNUM)
5565 {
5566 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5567
5568 switch (write_pc)
5569 {
5570 case BRANCH_WRITE_PC:
5571 branch_write_pc (regs, dsc, val);
5572 break;
5573
5574 case BX_WRITE_PC:
5575 bx_write_pc (regs, val);
5576 break;
5577
5578 case LOAD_WRITE_PC:
5579 load_write_pc (regs, dsc, val);
5580 break;
5581
5582 case ALU_WRITE_PC:
5583 alu_write_pc (regs, dsc, val);
5584 break;
5585
5586 case CANNOT_WRITE_PC:
5587 warning (_("Instruction wrote to PC in an unexpected way when "
5588 "single-stepping"));
5589 break;
5590
5591 default:
5592 internal_error (_("Invalid argument to displaced_write_reg"));
5593 }
5594
5595 dsc->wrote_to_pc = 1;
5596 }
5597 else
5598 {
5599 displaced_debug_printf ("writing r%d value %.8lx",
5600 regno, (unsigned long) val);
5601 regcache_cooked_write_unsigned (regs, regno, val);
5602 }
5603 }
5604
5605 /* This function is used to concisely determine if an instruction INSN
5606 references PC. Register fields of interest in INSN should have the
5607 corresponding fields of BITMASK set to 0b1111. The function
5608    returns 1 if any of these fields in INSN reference the PC
5609 (also 0b1111, r15), else it returns 0. */
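   For example, a BITMASK of 0x000f0000 checks only the Rn field (bits
   16-19), so the function returns 1 exactly when Rn is the PC.  */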
5610
5611 static int
5612 insn_references_pc (uint32_t insn, uint32_t bitmask)
5613 {
5614 uint32_t lowbit = 1;
5615
5616 while (bitmask != 0)
5617 {
5618 uint32_t mask;
5619
5620 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5621 ;
5622
5623 if (!lowbit)
5624 break;
5625
5626 mask = lowbit * 0xf;
5627
5628 if ((insn & mask) == mask)
5629 return 1;
5630
5631 bitmask &= ~mask;
5632 }
5633
5634 return 0;
5635 }
5636
5637 /* The simplest copy function. Many instructions have the same effect no
5638 matter what address they are executed at: in those cases, use this. */
5639
5640 static int
5641 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5642 arm_displaced_step_copy_insn_closure *dsc)
5643 {
5644 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5645 (unsigned long) insn, iname);
5646
5647 dsc->modinsn[0] = insn;
5648
5649 return 0;
5650 }
5651
5652 static int
5653 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5654 uint16_t insn2, const char *iname,
5655 arm_displaced_step_copy_insn_closure *dsc)
5656 {
5657 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5658 "unmodified", insn1, insn2, iname);
5659
5660 dsc->modinsn[0] = insn1;
5661 dsc->modinsn[1] = insn2;
5662 dsc->numinsns = 2;
5663
5664 return 0;
5665 }
5666
5667 /* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any
5668    modification.  */
5669 static int
5670 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5671 const char *iname,
5672 arm_displaced_step_copy_insn_closure *dsc)
5673 {
5674 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5675 insn, iname);
5676
5677 dsc->modinsn[0] = insn;
5678
5679 return 0;
5680 }
5681
5682 /* Preload instructions with immediate offset. */
5683
5684 static void
5685 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5686 arm_displaced_step_copy_insn_closure *dsc)
5687 {
5688 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5689 if (!dsc->u.preload.immed)
5690 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5691 }
5692
5693 static void
5694 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5695 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5696 {
5697 ULONGEST rn_val;
5698 /* Preload instructions:
5699
5700 {pli/pld} [rn, #+/-imm]
5701 ->
5702 {pli/pld} [r0, #+/-imm]. */
5703
5704 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5705 rn_val = displaced_read_reg (regs, dsc, rn);
5706 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5707 dsc->u.preload.immed = 1;
5708
5709 dsc->cleanup = &cleanup_preload;
5710 }
5711
5712 static int
5713 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5714 arm_displaced_step_copy_insn_closure *dsc)
5715 {
5716 unsigned int rn = bits (insn, 16, 19);
5717
5718 if (!insn_references_pc (insn, 0x000f0000ul))
5719 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5720
5721 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5722
5723 dsc->modinsn[0] = insn & 0xfff0ffff;
5724
5725 install_preload (gdbarch, regs, dsc, rn);
5726
5727 return 0;
5728 }
5729
5730 static int
5731 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5732 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5733 {
5734 unsigned int rn = bits (insn1, 0, 3);
5735 unsigned int u_bit = bit (insn1, 7);
5736 int imm12 = bits (insn2, 0, 11);
5737 ULONGEST pc_val;
5738
5739 if (rn != ARM_PC_REGNUM)
5740 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5741
5742   /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
5743      PLD (literal) Encoding T1.  */
5744 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5745 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5746 imm12);
5747
5748 if (!u_bit)
5749 imm12 = -1 * imm12;
5750
5751 /* Rewrite instruction {pli/pld} PC imm12 into:
5752 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5753
5754 {pli/pld} [r0, r1]
5755
5756 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5757
5758 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5759 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5760
5761 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5762
5763 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5764 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5765 dsc->u.preload.immed = 0;
5766
5767 /* {pli/pld} [r0, r1] */
5768 dsc->modinsn[0] = insn1 & 0xfff0;
5769 dsc->modinsn[1] = 0xf001;
5770 dsc->numinsns = 2;
5771
5772 dsc->cleanup = &cleanup_preload;
5773 return 0;
5774 }
5775
5776 /* Preload instructions with register offset. */
5777
5778 static void
5779 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5780 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5781 unsigned int rm)
5782 {
5783 ULONGEST rn_val, rm_val;
5784
5785 /* Preload register-offset instructions:
5786
5787 {pli/pld} [rn, rm {, shift}]
5788 ->
5789 {pli/pld} [r0, r1 {, shift}]. */
5790
5791 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5792 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5793 rn_val = displaced_read_reg (regs, dsc, rn);
5794 rm_val = displaced_read_reg (regs, dsc, rm);
5795 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5796 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5797 dsc->u.preload.immed = 0;
5798
5799 dsc->cleanup = &cleanup_preload;
5800 }
5801
5802 static int
5803 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5804 struct regcache *regs,
5805 arm_displaced_step_copy_insn_closure *dsc)
5806 {
5807 unsigned int rn = bits (insn, 16, 19);
5808 unsigned int rm = bits (insn, 0, 3);
5809
5810
5811 if (!insn_references_pc (insn, 0x000f000ful))
5812 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5813
5814 displaced_debug_printf ("copying preload insn %.8lx",
5815 (unsigned long) insn);
5816
5817 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5818
5819 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5820 return 0;
5821 }
5822
5823 /* Copy/cleanup coprocessor load and store instructions. */
5824
5825 static void
5826 cleanup_copro_load_store (struct gdbarch *gdbarch,
5827 struct regcache *regs,
5828 arm_displaced_step_copy_insn_closure *dsc)
5829 {
5830 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5831
5832 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5833
5834 if (dsc->u.ldst.writeback)
5835 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5836 }
5837
5838 static void
5839 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5840 arm_displaced_step_copy_insn_closure *dsc,
5841 int writeback, unsigned int rn)
5842 {
5843 ULONGEST rn_val;
5844
5845 /* Coprocessor load/store instructions:
5846
5847 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5848 ->
5849 {stc/stc2} [r0, #+/-imm].
5850
5851 ldc/ldc2 are handled identically. */
5852
5853 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5854 rn_val = displaced_read_reg (regs, dsc, rn);
5855 /* PC should be 4-byte aligned. */
5856 rn_val = rn_val & 0xfffffffc;
5857 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5858
5859 dsc->u.ldst.writeback = writeback;
5860 dsc->u.ldst.rn = rn;
5861
5862 dsc->cleanup = &cleanup_copro_load_store;
5863 }
5864
5865 static int
5866 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5867 struct regcache *regs,
5868 arm_displaced_step_copy_insn_closure *dsc)
5869 {
5870 unsigned int rn = bits (insn, 16, 19);
5871
5872 if (!insn_references_pc (insn, 0x000f0000ul))
5873 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5874
5875 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5876 (unsigned long) insn);
5877
5878 dsc->modinsn[0] = insn & 0xfff0ffff;
5879
5880 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5881
5882 return 0;
5883 }
5884
5885 static int
5886 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5887 uint16_t insn2, struct regcache *regs,
5888 arm_displaced_step_copy_insn_closure *dsc)
5889 {
5890 unsigned int rn = bits (insn1, 0, 3);
5891
5892 if (rn != ARM_PC_REGNUM)
5893 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5894 "copro load/store", dsc);
5895
5896 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5897 insn1, insn2);
5898
5899 dsc->modinsn[0] = insn1 & 0xfff0;
5900 dsc->modinsn[1] = insn2;
5901 dsc->numinsns = 2;
5902
5903   /* This function is called to copy the LDC/LDC2/VLDR instructions, which
5904      don't support writeback, so pass 0.  */
5905 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5906
5907 return 0;
5908 }
5909
5910 /* Clean up branch instructions (actually perform the branch, by setting
5911 PC). */
5912
5913 static void
5914 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5915 arm_displaced_step_copy_insn_closure *dsc)
5916 {
5917 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5918 int branch_taken = condition_true (dsc->u.branch.cond, status);
5919 enum pc_write_style write_pc = dsc->u.branch.exchange
5920 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5921
5922 if (!branch_taken)
5923 return;
5924
5925 if (dsc->u.branch.link)
5926 {
5927       /* LR should hold the address of the instruction following the current
5928 	 one.  In order not to confuse logic that later handles `bx lr', set
5929 	 bit 0 of the LR value when the current instruction is Thumb.  */
5930 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5931
5932 if (dsc->is_thumb)
5933 next_insn_addr |= 0x1;
5934
5935 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5936 CANNOT_WRITE_PC);
5937 }
5938
5939 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5940 }
5941
5942 /* Copy B/BL/BLX instructions with immediate destinations. */
5943
5944 static void
5945 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5946 arm_displaced_step_copy_insn_closure *dsc,
5947 unsigned int cond, int exchange, int link, long offset)
5948 {
5949 /* Implement "BL<cond> <label>" as:
5950
5951 Preparation: cond <- instruction condition
5952 Insn: mov r0, r0 (nop)
5953 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5954
5955 B<cond> similar, but don't set r14 in cleanup. */
5956
5957 dsc->u.branch.cond = cond;
5958 dsc->u.branch.link = link;
5959 dsc->u.branch.exchange = exchange;
5960
5961 dsc->u.branch.dest = dsc->insn_addr;
5962 if (link && exchange)
5963 /* For BLX, offset is computed from the Align (PC, 4). */
5964 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5965
5966 if (dsc->is_thumb)
5967 dsc->u.branch.dest += 4 + offset;
5968 else
5969 dsc->u.branch.dest += 8 + offset;
5970
5971 dsc->cleanup = &cleanup_branch;
5972 }
5973 static int
5974 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5975 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5976 {
5977 unsigned int cond = bits (insn, 28, 31);
5978 int exchange = (cond == 0xf);
5979 int link = exchange || bit (insn, 24);
5980 long offset;
5981
5982 displaced_debug_printf ("copying %s immediate insn %.8lx",
5983 (exchange) ? "blx" : (link) ? "bl" : "b",
5984 (unsigned long) insn);
5985 if (exchange)
5986 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5987 then arrange the switch into Thumb mode. */
5988 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5989 else
5990 offset = bits (insn, 0, 23) << 2;
5991
5992 if (bit (offset, 25))
5993 offset = offset | ~0x3ffffff;
5994
5995 dsc->modinsn[0] = ARM_NOP;
5996
5997 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5998 return 0;
5999 }
6000
6001 static int
6002 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6003 uint16_t insn2, struct regcache *regs,
6004 arm_displaced_step_copy_insn_closure *dsc)
6005 {
6006 int link = bit (insn2, 14);
6007 int exchange = link && !bit (insn2, 12);
6008 int cond = INST_AL;
6009 long offset = 0;
6010 int j1 = bit (insn2, 13);
6011 int j2 = bit (insn2, 11);
6012 int s = sbits (insn1, 10, 10);
6013 int i1 = !(j1 ^ bit (insn1, 10));
6014 int i2 = !(j2 ^ bit (insn1, 10));
6015
6016 if (!link && !exchange) /* B */
6017 {
6018 offset = (bits (insn2, 0, 10) << 1);
6019 if (bit (insn2, 12)) /* Encoding T4 */
6020 {
6021 offset |= (bits (insn1, 0, 9) << 12)
6022 | (i2 << 22)
6023 | (i1 << 23)
6024 | (s << 24);
6025 cond = INST_AL;
6026 }
6027 else /* Encoding T3 */
6028 {
6029 offset |= (bits (insn1, 0, 5) << 12)
6030 | (j1 << 18)
6031 | (j2 << 19)
6032 | (s << 20);
6033 cond = bits (insn1, 6, 9);
6034 }
6035 }
6036 else
6037 {
6038 offset = (bits (insn1, 0, 9) << 12);
6039 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6040 offset |= exchange ?
6041 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6042 }
6043
6044 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
6045 link ? (exchange) ? "blx" : "bl" : "b",
6046 insn1, insn2, offset);
6047
6048 dsc->modinsn[0] = THUMB_NOP;
6049
6050 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6051 return 0;
6052 }
6053
6054 /* Copy B Thumb instructions. */
6055 static int
6056 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
6057 arm_displaced_step_copy_insn_closure *dsc)
6058 {
6059 unsigned int cond = 0;
6060 int offset = 0;
6061 unsigned short bit_12_15 = bits (insn, 12, 15);
6062 CORE_ADDR from = dsc->insn_addr;
6063
6064 if (bit_12_15 == 0xd)
6065 {
6066 /* offset = SignExtend (imm8:0, 32) */
6067 offset = sbits ((insn << 1), 0, 8);
6068 cond = bits (insn, 8, 11);
6069 }
6070 else if (bit_12_15 == 0xe) /* Encoding T2 */
6071 {
6072 offset = sbits ((insn << 1), 0, 11);
6073 cond = INST_AL;
6074 }
6075
6076 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
6077 insn, offset);
6078
6079 dsc->u.branch.cond = cond;
6080 dsc->u.branch.link = 0;
6081 dsc->u.branch.exchange = 0;
6082 dsc->u.branch.dest = from + 4 + offset;
6083
6084 dsc->modinsn[0] = THUMB_NOP;
6085
6086 dsc->cleanup = &cleanup_branch;
6087
6088 return 0;
6089 }
6090
6091 /* Copy BX/BLX with register-specified destinations. */
6092
6093 static void
6094 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6095 arm_displaced_step_copy_insn_closure *dsc, int link,
6096 unsigned int cond, unsigned int rm)
6097 {
6098   /* Implement "{BX,BLX}<cond> <reg>" as:
6099
6100 Preparation: cond <- instruction condition
6101 Insn: mov r0, r0 (nop)
6102 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6103
6104 Don't set r14 in cleanup for BX. */
6105
6106 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6107
6108 dsc->u.branch.cond = cond;
6109 dsc->u.branch.link = link;
6110
6111 dsc->u.branch.exchange = 1;
6112
6113 dsc->cleanup = &cleanup_branch;
6114 }
6115
6116 static int
6117 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6118 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6119 {
6120 unsigned int cond = bits (insn, 28, 31);
6121 /* BX: x12xxx1x
6122 BLX: x12xxx3x. */
6123 int link = bit (insn, 5);
6124 unsigned int rm = bits (insn, 0, 3);
6125
6126 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
6127
6128 dsc->modinsn[0] = ARM_NOP;
6129
6130 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6131 return 0;
6132 }
6133
6134 static int
6135 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6136 struct regcache *regs,
6137 arm_displaced_step_copy_insn_closure *dsc)
6138 {
6139 int link = bit (insn, 7);
6140 unsigned int rm = bits (insn, 3, 6);
6141
6142 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
6143
6144 dsc->modinsn[0] = THUMB_NOP;
6145
6146 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6147
6148 return 0;
6149 }
6150
6151
6152 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6153
6154 static void
6155 cleanup_alu_imm (struct gdbarch *gdbarch,
6156 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6157 {
6158 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6159 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6160 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6161 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6162 }
6163
6164 static int
6165 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6166 arm_displaced_step_copy_insn_closure *dsc)
6167 {
6168 unsigned int rn = bits (insn, 16, 19);
6169 unsigned int rd = bits (insn, 12, 15);
6170 unsigned int op = bits (insn, 21, 24);
6171 int is_mov = (op == 0xd);
6172 ULONGEST rd_val, rn_val;
6173
6174 if (!insn_references_pc (insn, 0x000ff000ul))
6175 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6176
6177 displaced_debug_printf ("copying immediate %s insn %.8lx",
6178 is_mov ? "move" : "ALU",
6179 (unsigned long) insn);
6180
6181 /* Instruction is of form:
6182
6183 <op><cond> rd, [rn,] #imm
6184
6185 Rewrite as:
6186
6187 Preparation: tmp1, tmp2 <- r0, r1;
6188 r0, r1 <- rd, rn
6189 Insn: <op><cond> r0, r1, #imm
6190 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6191 */
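     As a concrete example, "add r2, pc, #4" (rd == r2, rn == pc) is copied
     as "add r0, r1, #4"; the preparation below primes r1 with the adjusted
     PC value, and cleanup_alu_imm then moves r0 into r2 and restores r0
     and r1.  */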
6192
6193 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6194 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6195 rn_val = displaced_read_reg (regs, dsc, rn);
6196 rd_val = displaced_read_reg (regs, dsc, rd);
6197 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6198 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6199 dsc->rd = rd;
6200
6201 if (is_mov)
6202 dsc->modinsn[0] = insn & 0xfff00fff;
6203 else
6204 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6205
6206 dsc->cleanup = &cleanup_alu_imm;
6207
6208 return 0;
6209 }
6210
6211 static int
6212 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6213 uint16_t insn2, struct regcache *regs,
6214 arm_displaced_step_copy_insn_closure *dsc)
6215 {
6216 unsigned int op = bits (insn1, 5, 8);
6217 unsigned int rn, rm, rd;
6218 ULONGEST rd_val, rn_val;
6219
6220 rn = bits (insn1, 0, 3); /* Rn */
6221 rm = bits (insn2, 0, 3); /* Rm */
6222 rd = bits (insn2, 8, 11); /* Rd */
6223
6224   /* This routine is only called for the MOV instruction.  */
6225 gdb_assert (op == 0x2 && rn == 0xf);
6226
6227 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6228 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6229
6230 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
6231
6232 /* Instruction is of form:
6233
6234 <op><cond> rd, [rn,] #imm
6235
6236 Rewrite as:
6237
6238 Preparation: tmp1, tmp2 <- r0, r1;
6239 r0, r1 <- rd, rn
6240 Insn: <op><cond> r0, r1, #imm
6241 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6242 */
6243
6244 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6245 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6246 rn_val = displaced_read_reg (regs, dsc, rn);
6247 rd_val = displaced_read_reg (regs, dsc, rd);
6248 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6249 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6250 dsc->rd = rd;
6251
6252 dsc->modinsn[0] = insn1;
6253 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6254 dsc->numinsns = 2;
6255
6256 dsc->cleanup = &cleanup_alu_imm;
6257
6258 return 0;
6259 }
6260
6261 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6262
6263 static void
6264 cleanup_alu_reg (struct gdbarch *gdbarch,
6265 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6266 {
6267 ULONGEST rd_val;
6268 int i;
6269
6270 rd_val = displaced_read_reg (regs, dsc, 0);
6271
6272 for (i = 0; i < 3; i++)
6273 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6274
6275 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6276 }
6277
6278 static void
6279 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6280 arm_displaced_step_copy_insn_closure *dsc,
6281 unsigned int rd, unsigned int rn, unsigned int rm)
6282 {
6283 ULONGEST rd_val, rn_val, rm_val;
6284
6285 /* Instruction is of form:
6286
6287 <op><cond> rd, [rn,] rm [, <shift>]
6288
6289 Rewrite as:
6290
6291 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6292 r0, r1, r2 <- rd, rn, rm
6293 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6294 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6295 */
6296
6297 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6298 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6299 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6300 rd_val = displaced_read_reg (regs, dsc, rd);
6301 rn_val = displaced_read_reg (regs, dsc, rn);
6302 rm_val = displaced_read_reg (regs, dsc, rm);
6303 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6304 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6305 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6306 dsc->rd = rd;
6307
6308 dsc->cleanup = &cleanup_alu_reg;
6309 }
6310
6311 static int
6312 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6313 arm_displaced_step_copy_insn_closure *dsc)
6314 {
6315 unsigned int op = bits (insn, 21, 24);
6316 int is_mov = (op == 0xd);
6317
6318 if (!insn_references_pc (insn, 0x000ff00ful))
6319 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6320
6321 displaced_debug_printf ("copying reg %s insn %.8lx",
6322 is_mov ? "move" : "ALU", (unsigned long) insn);
6323
6324 if (is_mov)
6325 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6326 else
6327 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6328
6329 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6330 bits (insn, 0, 3));
6331 return 0;
6332 }
6333
6334 static int
6335 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6336 struct regcache *regs,
6337 arm_displaced_step_copy_insn_closure *dsc)
6338 {
6339 unsigned rm, rd;
6340
6341 rm = bits (insn, 3, 6);
6342 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6343
6344 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6345 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6346
6347 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6348
6349 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6350
6351 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6352
6353 return 0;
6354 }
6355
6356 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6357
6358 static void
6359 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6360 struct regcache *regs,
6361 arm_displaced_step_copy_insn_closure *dsc)
6362 {
6363 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6364 int i;
6365
6366 for (i = 0; i < 4; i++)
6367 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6368
6369 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6370 }
6371
6372 static void
6373 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6374 arm_displaced_step_copy_insn_closure *dsc,
6375 unsigned int rd, unsigned int rn, unsigned int rm,
6376 unsigned rs)
6377 {
6378 int i;
6379 ULONGEST rd_val, rn_val, rm_val, rs_val;
6380
6381 /* Instruction is of form:
6382
6383 <op><cond> rd, [rn,] rm, <shift> rs
6384
6385 Rewrite as:
6386
6387 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6388 r0, r1, r2, r3 <- rd, rn, rm, rs
6389 Insn: <op><cond> r0, r1, r2, <shift> r3
6390 Cleanup: tmp5 <- r0
6391 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6392 rd <- tmp5
6393 */
6394
6395 for (i = 0; i < 4; i++)
6396 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6397
6398 rd_val = displaced_read_reg (regs, dsc, rd);
6399 rn_val = displaced_read_reg (regs, dsc, rn);
6400 rm_val = displaced_read_reg (regs, dsc, rm);
6401 rs_val = displaced_read_reg (regs, dsc, rs);
6402 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6403 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6404 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6405 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6406 dsc->rd = rd;
6407 dsc->cleanup = &cleanup_alu_shifted_reg;
6408 }
6409
6410 static int
6411 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6412 struct regcache *regs,
6413 arm_displaced_step_copy_insn_closure *dsc)
6414 {
6415 unsigned int op = bits (insn, 21, 24);
6416 int is_mov = (op == 0xd);
6417 unsigned int rd, rn, rm, rs;
6418
6419 if (!insn_references_pc (insn, 0x000fff0ful))
6420 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6421
6422 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6423 is_mov ? "move" : "ALU",
6424 (unsigned long) insn);
6425
6426 rn = bits (insn, 16, 19);
6427 rm = bits (insn, 0, 3);
6428 rs = bits (insn, 8, 11);
6429 rd = bits (insn, 12, 15);
6430
6431 if (is_mov)
6432 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6433 else
6434 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6435
6436 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6437
6438 return 0;
6439 }
6440
6441 /* Clean up load instructions. */
6442
6443 static void
6444 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6445 arm_displaced_step_copy_insn_closure *dsc)
6446 {
6447 ULONGEST rt_val, rt_val2 = 0, rn_val;
6448
6449 rt_val = displaced_read_reg (regs, dsc, 0);
6450 if (dsc->u.ldst.xfersize == 8)
6451 rt_val2 = displaced_read_reg (regs, dsc, 1);
6452 rn_val = displaced_read_reg (regs, dsc, 2);
6453
6454 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6455 if (dsc->u.ldst.xfersize > 4)
6456 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6457 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6458 if (!dsc->u.ldst.immed)
6459 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6460
6461 /* Handle register writeback. */
6462 if (dsc->u.ldst.writeback)
6463 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6464 /* Put result in right place. */
6465 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6466 if (dsc->u.ldst.xfersize == 8)
6467 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6468 }
6469
6470 /* Clean up store instructions. */
6471
6472 static void
6473 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6474 arm_displaced_step_copy_insn_closure *dsc)
6475 {
6476 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6477
6478 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6479 if (dsc->u.ldst.xfersize > 4)
6480 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6481 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6482 if (!dsc->u.ldst.immed)
6483 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6484 if (!dsc->u.ldst.restore_r4)
6485 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6486
6487 /* Writeback. */
6488 if (dsc->u.ldst.writeback)
6489 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6490 }
6491
6492 /* Copy "extra" load/store instructions. These are halfword/doubleword
6493 transfers, which have a different encoding to byte/word transfers. */
6494
6495 static int
6496 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6497 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6498 {
6499 unsigned int op1 = bits (insn, 20, 24);
6500 unsigned int op2 = bits (insn, 5, 6);
6501 unsigned int rt = bits (insn, 12, 15);
6502 unsigned int rn = bits (insn, 16, 19);
6503 unsigned int rm = bits (insn, 0, 3);
6504 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6505 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
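  /* The two tables above are indexed by the opcode value computed below;
     they record whether each extra load/store form is a load and how many
     bytes it transfers (1 for byte, 2 for halfword, 8 for doubleword).  */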
6506 int immed = (op1 & 0x4) != 0;
6507 int opcode;
6508 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6509
6510 if (!insn_references_pc (insn, 0x000ff00ful))
6511 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6512
6513 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6514 unprivileged ? "unprivileged " : "",
6515 (unsigned long) insn);
6516
6517 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6518
6519 if (opcode < 0)
6520 internal_error (_("copy_extra_ld_st: instruction decode error"));
6521
6522 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6523 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6524 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6525 if (!immed)
6526 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6527
6528 rt_val = displaced_read_reg (regs, dsc, rt);
6529 if (bytesize[opcode] == 8)
6530 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6531 rn_val = displaced_read_reg (regs, dsc, rn);
6532 if (!immed)
6533 rm_val = displaced_read_reg (regs, dsc, rm);
6534
6535 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6536 if (bytesize[opcode] == 8)
6537 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6538 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6539 if (!immed)
6540 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6541
6542 dsc->rd = rt;
6543 dsc->u.ldst.xfersize = bytesize[opcode];
6544 dsc->u.ldst.rn = rn;
6545 dsc->u.ldst.immed = immed;
6546 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6547 dsc->u.ldst.restore_r4 = 0;
6548
6549 if (immed)
6550 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6551 ->
6552 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6553 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6554 else
6555 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6556 ->
6557 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6558 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6559
6560 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6561
6562 return 0;
6563 }
6564
6565 /* Copy byte/half word/word loads and stores. */
6566
6567 static void
6568 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6569 arm_displaced_step_copy_insn_closure *dsc, int load,
6570 int immed, int writeback, int size, int usermode,
6571 int rt, int rm, int rn)
6572 {
6573 ULONGEST rt_val, rn_val, rm_val = 0;
6574
6575 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6576 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6577 if (!immed)
6578 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6579 if (!load)
6580 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6581
6582 rt_val = displaced_read_reg (regs, dsc, rt);
6583 rn_val = displaced_read_reg (regs, dsc, rn);
6584 if (!immed)
6585 rm_val = displaced_read_reg (regs, dsc, rm);
6586
6587 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6588 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6589 if (!immed)
6590 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6591 dsc->rd = rt;
6592 dsc->u.ldst.xfersize = size;
6593 dsc->u.ldst.rn = rn;
6594 dsc->u.ldst.immed = immed;
6595 dsc->u.ldst.writeback = writeback;
6596
6597 /* To write PC we can do:
6598
6599 Before this sequence of instructions:
6600 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6601 r2 is the Rn value got from displaced_read_reg.
6602
6603 Insn1: push {pc} Write address of STR instruction + offset on stack
6604 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6605 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6606 = addr(Insn1) + offset - addr(Insn3) - 8
6607 = offset - 16
6608 Insn4: add r4, r4, #8 r4 = offset - 8
6609 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6610 = from + offset
6611 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6612
6613 Otherwise we don't know what value to write for PC, since the offset is
6614 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6615 of this can be found in Section "Saving from r15" in
6616 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6617
6618 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6619 }
6620
6621
6622 static int
6623 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6624 uint16_t insn2, struct regcache *regs,
6625 arm_displaced_step_copy_insn_closure *dsc, int size)
6626 {
6627 unsigned int u_bit = bit (insn1, 7);
6628 unsigned int rt = bits (insn2, 12, 15);
6629 int imm12 = bits (insn2, 0, 11);
6630 ULONGEST pc_val;
6631
6632 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6633 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6634 imm12);
6635
6636 if (!u_bit)
6637 imm12 = -1 * imm12;
6638
6639 /* Rewrite instruction LDR Rt imm12 into:
6640
6641 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6642
6643 LDR R0, R2, R3,
6644
6645 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6646
6647
6648 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6649 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6650 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6651
6652 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6653
6654 pc_val = pc_val & 0xfffffffc;
6655
6656 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6657 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6658
6659 dsc->rd = rt;
6660
6661 dsc->u.ldst.xfersize = size;
6662 dsc->u.ldst.immed = 0;
6663 dsc->u.ldst.writeback = 0;
6664 dsc->u.ldst.restore_r4 = 0;
6665
6666 /* LDR R0, R2, R3 */
6667 dsc->modinsn[0] = 0xf852;
6668 dsc->modinsn[1] = 0x3;
6669 dsc->numinsns = 2;
6670
6671 dsc->cleanup = &cleanup_load;
6672
6673 return 0;
6674 }
6675
6676 static int
6677 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6678 uint16_t insn2, struct regcache *regs,
6679 arm_displaced_step_copy_insn_closure *dsc,
6680 int writeback, int immed)
6681 {
6682 unsigned int rt = bits (insn2, 12, 15);
6683 unsigned int rn = bits (insn1, 0, 3);
6684 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6685 /* In LDR (register), there is also a register Rm, which is not allowed to
6686 be PC, so we don't have to check it. */
6687
6688 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6689 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6690 dsc);
6691
6692 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6693 rt, rn, insn1, insn2);
6694
6695 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6696 0, rt, rm, rn);
6697
6698 dsc->u.ldst.restore_r4 = 0;
6699
6700 if (immed)
6701 /* ldr[b]<cond> rt, [rn, #imm], etc.
6702 ->
6703 ldr[b]<cond> r0, [r2, #imm]. */
6704 {
6705 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6706 dsc->modinsn[1] = insn2 & 0x0fff;
6707 }
6708 else
6709 /* ldr[b]<cond> rt, [rn, rm], etc.
6710 ->
6711 ldr[b]<cond> r0, [r2, r3]. */
6712 {
6713 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6714 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6715 }
6716
6717 dsc->numinsns = 2;
6718
6719 return 0;
6720 }
6721
6722
6723 static int
6724 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6725 struct regcache *regs,
6726 arm_displaced_step_copy_insn_closure *dsc,
6727 int load, int size, int usermode)
6728 {
6729 int immed = !bit (insn, 25);
6730 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6731 unsigned int rt = bits (insn, 12, 15);
6732 unsigned int rn = bits (insn, 16, 19);
6733 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6734
6735 if (!insn_references_pc (insn, 0x000ff00ful))
6736 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6737
6738 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6739 load ? (size == 1 ? "ldrb" : "ldr")
6740 : (size == 1 ? "strb" : "str"),
6741 usermode ? "t" : "",
6742 rt, rn,
6743 (unsigned long) insn);
6744
6745 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6746 usermode, rt, rm, rn);
6747
6748 if (load || rt != ARM_PC_REGNUM)
6749 {
6750 dsc->u.ldst.restore_r4 = 0;
6751
6752 if (immed)
6753 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6754 ->
6755 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6756 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6757 else
6758 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6759 ->
6760 {ldr,str}[b]<cond> r0, [r2, r3]. */
6761 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6762 }
6763 else
6764 {
6765 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6766 dsc->u.ldst.restore_r4 = 1;
6767 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6768 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6769 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6770 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6771 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6772
6773 /* As above. */
6774 if (immed)
6775 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6776 else
6777 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6778
6779 dsc->numinsns = 6;
6780 }
6781
6782 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6783
6784 return 0;
6785 }
6786
6787 /* Cleanup LDM instructions with fully-populated register list. This is an
6788 unfortunate corner case: it's impossible to implement correctly by modifying
6789 the instruction. The issue is as follows: we have an instruction,
6790
6791 ldm rN, {r0-r15}
6792
6793 which we must rewrite to avoid loading PC. A possible solution would be to
6794 do the load in two halves, something like (with suitable cleanup
6795 afterwards):
6796
6797 mov r8, rN
6798 ldm[id][ab] r8!, {r0-r7}
6799 str r7, <temp>
6800 ldm[id][ab] r8, {r7-r14}
6801 <bkpt>
6802
6803 but at present there's no suitable place for <temp>, since the scratch space
6804 is overwritten before the cleanup routine is called. For now, we simply
6805 emulate the instruction. */
6806
6807 static void
6808 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6809 arm_displaced_step_copy_insn_closure *dsc)
6810 {
6811 int inc = dsc->u.block.increment;
6812 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6813 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6814 uint32_t regmask = dsc->u.block.regmask;
6815 int regno = inc ? 0 : 15;
6816 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6817 int exception_return = dsc->u.block.load && dsc->u.block.user
6818 && (regmask & 0x8000) != 0;
6819 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6820 int do_transfer = condition_true (dsc->u.block.cond, status);
6821 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6822
6823 if (!do_transfer)
6824 return;
6825
6826 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6827 sensible we can do here. Complain loudly. */
6828 if (exception_return)
6829 error (_("Cannot single-step exception return"));
6830
6831 /* We don't handle any stores here for now. */
6832 gdb_assert (dsc->u.block.load != 0);
6833
6834 displaced_debug_printf ("emulating block transfer: %s %s %s",
6835 dsc->u.block.load ? "ldm" : "stm",
6836 dsc->u.block.increment ? "inc" : "dec",
6837 dsc->u.block.before ? "before" : "after");
6838
6839 while (regmask)
6840 {
6841 uint32_t memword;
6842
6843 if (inc)
6844 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6845 regno++;
6846 else
6847 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6848 regno--;
6849
6850 xfer_addr += bump_before;
6851
6852 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6853 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6854
6855 xfer_addr += bump_after;
6856
6857 regmask &= ~(1 << regno);
6858 }
6859
6860 if (dsc->u.block.writeback)
6861 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6862 CANNOT_WRITE_PC);
6863 }
6864
6865 /* Clean up an STM which included the PC in the register list. */
6866
6867 static void
6868 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6869 arm_displaced_step_copy_insn_closure *dsc)
6870 {
6871 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6872 int store_executed = condition_true (dsc->u.block.cond, status);
6873 CORE_ADDR pc_stored_at, transferred_regs
6874 = count_one_bits (dsc->u.block.regmask);
6875 CORE_ADDR stm_insn_addr;
6876 uint32_t pc_val;
6877 long offset;
6878 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6879
6880 /* If condition code fails, there's nothing else to do. */
6881 if (!store_executed)
6882 return;
6883
6884 if (dsc->u.block.increment)
6885 {
6886 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6887
6888 if (dsc->u.block.before)
6889 pc_stored_at += 4;
6890 }
6891 else
6892 {
6893 pc_stored_at = dsc->u.block.xfer_addr;
6894
6895 if (dsc->u.block.before)
6896 pc_stored_at -= 4;
6897 }
6898
6899 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6900 stm_insn_addr = dsc->scratch_base;
6901 offset = pc_val - stm_insn_addr;
6902
6903 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6904 offset);
6905
6906 /* Rewrite the stored PC to the proper value for the non-displaced original
6907 instruction. */
6908 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6909 dsc->insn_addr + offset);
6910 }
6911
6912 /* Clean up an LDM which includes the PC in the register list. We clumped all
6913 the registers in the transferred list into a contiguous range r0...rX (to
6914 avoid loading PC directly and losing control of the debugged program), so we
6915 must undo that here. */
6916
6917 static void
6918 cleanup_block_load_pc (struct gdbarch *gdbarch,
6919 struct regcache *regs,
6920 arm_displaced_step_copy_insn_closure *dsc)
6921 {
6922 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6923 int load_executed = condition_true (dsc->u.block.cond, status);
6924 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6925 unsigned int regs_loaded = count_one_bits (mask);
6926 unsigned int num_to_shuffle = regs_loaded, clobbered;
6927
6928 /* The method employed here will fail if the register list is fully populated
6929 (we need to avoid loading PC directly). */
6930 gdb_assert (num_to_shuffle < 16);
6931
6932 if (!load_executed)
6933 return;
6934
6935 clobbered = (1 << num_to_shuffle) - 1;
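  /* CLOBBERED now marks the low registers r0..rX that received loaded
     values; any of them that are not themselves final destinations are
     restored from the saved temporaries once the shuffle below is done.  */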
6936
6937 while (num_to_shuffle > 0)
6938 {
6939 if ((mask & (1 << write_reg)) != 0)
6940 {
6941 unsigned int read_reg = num_to_shuffle - 1;
6942
6943 if (read_reg != write_reg)
6944 {
6945 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6946 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6947 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6948 read_reg, write_reg);
6949 }
6950 else
6951 displaced_debug_printf ("LDM: register r%d already in the right "
6952 "place", write_reg);
6953
6954 clobbered &= ~(1 << write_reg);
6955
6956 num_to_shuffle--;
6957 }
6958
6959 write_reg--;
6960 }
6961
6962 /* Restore any registers we scribbled over. */
6963 for (write_reg = 0; clobbered != 0; write_reg++)
6964 {
6965 if ((clobbered & (1 << write_reg)) != 0)
6966 {
6967 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6968 CANNOT_WRITE_PC);
6969 displaced_debug_printf ("LDM: restored clobbered register r%d",
6970 write_reg);
6971 clobbered &= ~(1 << write_reg);
6972 }
6973 }
6974
6975 /* Perform register writeback manually. */
6976 if (dsc->u.block.writeback)
6977 {
6978 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6979
6980 if (dsc->u.block.increment)
6981 new_rn_val += regs_loaded * 4;
6982 else
6983 new_rn_val -= regs_loaded * 4;
6984
6985 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6986 CANNOT_WRITE_PC);
6987 }
6988 }
6989
6990 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6991 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6992
6993 static int
6994 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6995 struct regcache *regs,
6996 arm_displaced_step_copy_insn_closure *dsc)
6997 {
6998 int load = bit (insn, 20);
6999 int user = bit (insn, 22);
7000 int increment = bit (insn, 23);
7001 int before = bit (insn, 24);
7002 int writeback = bit (insn, 21);
7003 int rn = bits (insn, 16, 19);
7004
7005 /* Block transfers which don't mention PC can be run directly
7006 out-of-line. */
7007 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7008 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7009
7010 if (rn == ARM_PC_REGNUM)
7011 {
7012 warning (_("displaced: Unpredictable LDM or STM with "
7013 "base register r15"));
7014 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7015 }
7016
7017 displaced_debug_printf ("copying block transfer insn %.8lx",
7018 (unsigned long) insn);
7019
7020 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7021 dsc->u.block.rn = rn;
7022
7023 dsc->u.block.load = load;
7024 dsc->u.block.user = user;
7025 dsc->u.block.increment = increment;
7026 dsc->u.block.before = before;
7027 dsc->u.block.writeback = writeback;
7028 dsc->u.block.cond = bits (insn, 28, 31);
7029
7030 dsc->u.block.regmask = insn & 0xffff;
7031
7032 if (load)
7033 {
7034 if ((insn & 0xffff) == 0xffff)
7035 {
7036 /* LDM with a fully-populated register list. This case is
7037 particularly tricky. Implement for now by fully emulating the
7038 instruction (which might not behave perfectly in all cases, but
7039 these instructions should be rare enough for that not to matter
7040 too much). */
7041 dsc->modinsn[0] = ARM_NOP;
7042
7043 dsc->cleanup = &cleanup_block_load_all;
7044 }
7045 else
7046 {
7047 /* LDM of a list of registers which includes PC. Implement by
7048 rewriting the list of registers to be transferred into a
7049 contiguous chunk r0...rX before doing the transfer, then shuffling
7050 registers into the correct places in the cleanup routine. */
7051 unsigned int regmask = insn & 0xffff;
7052 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7053 unsigned int i;
7054
7055 for (i = 0; i < num_in_list; i++)
7056 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7057
7058 /* Writeback makes things complicated. We need to avoid clobbering
7059 the base register with one of the registers in our modified
7060 register list, but just using a different register can't work in
7061 all cases, e.g.:
7062
7063 ldm r14!, {r0-r13,pc}
7064
7065 which would need to be rewritten as:
7066
7067 ldm rN!, {r0-r14}
7068
7069 but that can't work, because there's no free register for N.
7070
7071 Solve this by turning off the writeback bit, and emulating
7072 writeback manually in the cleanup routine. */
7073
7074 if (writeback)
7075 insn &= ~(1 << 21);
7076
7077 new_regmask = (1 << num_in_list) - 1;
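	  /* For example, an original list {r1, r4, pc} (three registers)
	     becomes {r0, r1, r2}; cleanup_block_load_pc then moves the
	     loaded values into r1, r4 and the PC.  */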
7078
7079 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7080 "%.4x, modified list %.4x",
7081 rn, writeback ? "!" : "",
7082 (int) insn & 0xffff, new_regmask);
7083
7084 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7085
7086 dsc->cleanup = &cleanup_block_load_pc;
7087 }
7088 }
7089 else
7090 {
7091 /* STM of a list of registers which includes PC. Run the instruction
7092 as-is, but out of line: this will store the wrong value for the PC,
7093 so we must manually fix up the memory in the cleanup routine.
7094 Doing things this way has the advantage that we can auto-detect
7095 the offset of the PC write (which is architecture-dependent) in
7096 the cleanup routine. */
7097 dsc->modinsn[0] = insn;
7098
7099 dsc->cleanup = &cleanup_block_store_pc;
7100 }
7101
7102 return 0;
7103 }
7104
7105 static int
7106 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7107 struct regcache *regs,
7108 arm_displaced_step_copy_insn_closure *dsc)
7109 {
7110 int rn = bits (insn1, 0, 3);
7111 int load = bit (insn1, 4);
7112 int writeback = bit (insn1, 5);
7113
7114 /* Block transfers which don't mention PC can be run directly
7115 out-of-line. */
7116 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7117 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7118
7119 if (rn == ARM_PC_REGNUM)
7120 {
7121 warning (_("displaced: Unpredictable LDM or STM with "
7122 "base register r15"));
7123 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7124 "unpredictable ldm/stm", dsc);
7125 }
7126
7127 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
7128 insn1, insn2);
7129
7130   /* Clear bit 13, since it should always be zero.  */
7131 dsc->u.block.regmask = (insn2 & 0xdfff);
7132 dsc->u.block.rn = rn;
7133
7134 dsc->u.block.load = load;
7135 dsc->u.block.user = 0;
7136 dsc->u.block.increment = bit (insn1, 7);
7137 dsc->u.block.before = bit (insn1, 8);
7138 dsc->u.block.writeback = writeback;
7139 dsc->u.block.cond = INST_AL;
7140 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7141
7142 if (load)
7143 {
7144 if (dsc->u.block.regmask == 0xffff)
7145 {
7146 	  /* This case cannot happen: bit 13 was cleared from the mask above.  */
7147 gdb_assert (0);
7148 }
7149 else
7150 {
7151 unsigned int regmask = dsc->u.block.regmask;
7152 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7153 unsigned int i;
7154
7155 for (i = 0; i < num_in_list; i++)
7156 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7157
7158 if (writeback)
7159 insn1 &= ~(1 << 5);
7160
7161 new_regmask = (1 << num_in_list) - 1;
7162
7163 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7164 "%.4x, modified list %.4x",
7165 rn, writeback ? "!" : "",
7166 (int) dsc->u.block.regmask, new_regmask);
7167
7168 dsc->modinsn[0] = insn1;
7169 dsc->modinsn[1] = (new_regmask & 0xffff);
7170 dsc->numinsns = 2;
7171
7172 dsc->cleanup = &cleanup_block_load_pc;
7173 }
7174 }
7175 else
7176 {
7177 dsc->modinsn[0] = insn1;
7178 dsc->modinsn[1] = insn2;
7179 dsc->numinsns = 2;
7180 dsc->cleanup = &cleanup_block_store_pc;
7181 }
7182 return 0;
7183 }
7184
7185 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
7186 This is used to avoid a dependency on BFD's bfd_endian enum. */
7187
7188 ULONGEST
7189 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
7190 int byte_order)
7191 {
7192 return read_memory_unsigned_integer (memaddr, len,
7193 (enum bfd_endian) byte_order);
7194 }
7195
7196 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
7197
7198 CORE_ADDR
7199 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
7200 CORE_ADDR val)
7201 {
7202 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
7203 }
7204
7205 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
7206
7207 static CORE_ADDR
7208 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
7209 {
7210 return 0;
7211 }
7212
7213 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
7214
7215 int
7216 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
7217 {
7218 return arm_is_thumb (self->regcache);
7219 }
7220
7221 /* arm_software_single_step () is called just before we want to resume
7222    the inferior, if we want to single-step it but there is no hardware
7223    or kernel single-step support.  We find the targets of the coming
7224    instructions and breakpoint them.  */
7225
7226 std::vector<CORE_ADDR>
7227 arm_software_single_step (struct regcache *regcache)
7228 {
7229 struct gdbarch *gdbarch = regcache->arch ();
7230 struct arm_get_next_pcs next_pcs_ctx;
7231
7232 arm_get_next_pcs_ctor (&next_pcs_ctx,
7233 &arm_get_next_pcs_ops,
7234 gdbarch_byte_order (gdbarch),
7235 gdbarch_byte_order_for_code (gdbarch),
7236 0,
7237 regcache);
7238
7239 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7240
7241 for (CORE_ADDR &pc_ref : next_pcs)
7242 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
7243
7244 return next_pcs;
7245 }
7246
7247 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7248 for Linux, where some SVC instructions must be treated specially. */
7249
7250 static void
7251 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7252 arm_displaced_step_copy_insn_closure *dsc)
7253 {
7254 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7255
7256 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
7257 (unsigned long) resume_addr);
7258
7259 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7260 }
7261
7262
7263 /* Common copy routine for svc instruction. */
7264
7265 static int
7266 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7267 arm_displaced_step_copy_insn_closure *dsc)
7268 {
7269 /* Preparation: none.
7270 Insn: unmodified svc.
7271 Cleanup: pc <- insn_addr + insn_size. */
7272
7273 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7274 instruction. */
7275 dsc->wrote_to_pc = 1;
7276
7277 /* Allow OS-specific code to override SVC handling. */
7278 if (dsc->u.svc.copy_svc_os)
7279 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7280 else
7281 {
7282 dsc->cleanup = &cleanup_svc;
7283 return 0;
7284 }
7285 }
7286
7287 static int
7288 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7289 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7290 {
7291
7292 displaced_debug_printf ("copying svc insn %.8lx",
7293 (unsigned long) insn);
7294
7295 dsc->modinsn[0] = insn;
7296
7297 return install_svc (gdbarch, regs, dsc);
7298 }
7299
7300 static int
7301 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7302 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7303 {
7304
7305 displaced_debug_printf ("copying svc insn %.4x", insn);
7306
7307 dsc->modinsn[0] = insn;
7308
7309 return install_svc (gdbarch, regs, dsc);
7310 }
7311
7312 /* Copy undefined instructions. */
7313
7314 static int
7315 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7316 arm_displaced_step_copy_insn_closure *dsc)
7317 {
7318 displaced_debug_printf ("copying undefined insn %.8lx",
7319 (unsigned long) insn);
7320
7321 dsc->modinsn[0] = insn;
7322
7323 return 0;
7324 }
7325
7326 static int
7327 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7328 arm_displaced_step_copy_insn_closure *dsc)
7329 {
7330
7331 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7332 (unsigned short) insn1, (unsigned short) insn2);
7333
7334 dsc->modinsn[0] = insn1;
7335 dsc->modinsn[1] = insn2;
7336 dsc->numinsns = 2;
7337
7338 return 0;
7339 }
7340
7341 /* Copy unpredictable instructions. */
7342
7343 static int
7344 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7345 arm_displaced_step_copy_insn_closure *dsc)
7346 {
7347 displaced_debug_printf ("copying unpredictable insn %.8lx",
7348 (unsigned long) insn);
7349
7350 dsc->modinsn[0] = insn;
7351
7352 return 0;
7353 }
7354
7355 /* The decode_* functions are instruction decoding helpers. They mostly follow
7356 the presentation in the ARM ARM. */
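/* A note on the helpers used below (explanatory, reflecting their use in
   this file): bit (insn, N) tests bit N of INSN, and bits (insn, A, B)
   extracts the inclusive bit field INSN[B:A].  For example, for an ARM
   data-processing instruction, bits (insn, 16, 19) yields the Rn field
   and bits (insn, 12, 15) the Rd field.  */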
7357
7358 static int
7359 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7360 struct regcache *regs,
7361 arm_displaced_step_copy_insn_closure *dsc)
7362 {
7363 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7364 unsigned int rn = bits (insn, 16, 19);
7365
7366 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7367 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7368 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7369 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7370 else if ((op1 & 0x60) == 0x20)
7371 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7372 else if ((op1 & 0x71) == 0x40)
7373 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7374 dsc);
7375 else if ((op1 & 0x77) == 0x41)
7376 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7377 else if ((op1 & 0x77) == 0x45)
7378 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7379 else if ((op1 & 0x77) == 0x51)
7380 {
7381 if (rn != 0xf)
7382 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7383 else
7384 return arm_copy_unpred (gdbarch, insn, dsc);
7385 }
7386 else if ((op1 & 0x77) == 0x55)
7387 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7388 else if (op1 == 0x57)
7389 switch (op2)
7390 {
7391 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7392 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7393 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7394 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7395 default: return arm_copy_unpred (gdbarch, insn, dsc);
7396 }
7397 else if ((op1 & 0x63) == 0x43)
7398 return arm_copy_unpred (gdbarch, insn, dsc);
7399 else if ((op2 & 0x1) == 0x0)
7400 switch (op1 & ~0x80)
7401 {
7402 case 0x61:
7403 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7404 case 0x65:
7405 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7406 case 0x71: case 0x75:
7407 /* pld/pldw reg. */
7408 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7409 case 0x63: case 0x67: case 0x73: case 0x77:
7410 return arm_copy_unpred (gdbarch, insn, dsc);
7411 default:
7412 return arm_copy_undef (gdbarch, insn, dsc);
7413 }
7414 else
7415 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7416 }
7417
7418 static int
7419 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7420 struct regcache *regs,
7421 arm_displaced_step_copy_insn_closure *dsc)
7422 {
7423 if (bit (insn, 27) == 0)
7424 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7425 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7426 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7427 {
7428 case 0x0: case 0x2:
7429 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7430
7431 case 0x1: case 0x3:
7432 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7433
7434 case 0x4: case 0x5: case 0x6: case 0x7:
7435 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7436
7437 case 0x8:
7438 switch ((insn & 0xe00000) >> 21)
7439 {
7440 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7441 /* stc/stc2. */
7442 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7443
7444 case 0x2:
7445 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7446
7447 default:
7448 return arm_copy_undef (gdbarch, insn, dsc);
7449 }
7450
7451 case 0x9:
7452 {
7453 int rn_f = (bits (insn, 16, 19) == 0xf);
7454 switch ((insn & 0xe00000) >> 21)
7455 {
7456 case 0x1: case 0x3:
7457 /* ldc/ldc2 imm (undefined for rn == pc). */
7458 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7459 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7460
7461 case 0x2:
7462 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7463
7464 case 0x4: case 0x5: case 0x6: case 0x7:
7465 /* ldc/ldc2 lit (undefined for rn != pc). */
7466 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7467 : arm_copy_undef (gdbarch, insn, dsc);
7468
7469 default:
7470 return arm_copy_undef (gdbarch, insn, dsc);
7471 }
7472 }
7473
7474 case 0xa:
7475 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7476
7477 case 0xb:
7478 if (bits (insn, 16, 19) == 0xf)
7479 /* ldc/ldc2 lit. */
7480 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7481 else
7482 return arm_copy_undef (gdbarch, insn, dsc);
7483
7484 case 0xc:
7485 if (bit (insn, 4))
7486 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7487 else
7488 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7489
7490 case 0xd:
7491 if (bit (insn, 4))
7492 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7493 else
7494 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7495
7496 default:
7497 return arm_copy_undef (gdbarch, insn, dsc);
7498 }
7499 }
7500
7501 /* Decode miscellaneous instructions in dp/misc encoding space. */
7502
7503 static int
7504 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7505 struct regcache *regs,
7506 arm_displaced_step_copy_insn_closure *dsc)
7507 {
7508 unsigned int op2 = bits (insn, 4, 6);
7509 unsigned int op = bits (insn, 21, 22);
7510
7511 switch (op2)
7512 {
7513 case 0x0:
7514 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7515
7516 case 0x1:
7517 if (op == 0x1) /* bx. */
7518 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7519 else if (op == 0x3)
7520 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7521 else
7522 return arm_copy_undef (gdbarch, insn, dsc);
7523
7524 case 0x2:
7525 if (op == 0x1)
7526 /* Not really supported. */
7527 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7528 else
7529 return arm_copy_undef (gdbarch, insn, dsc);
7530
7531 case 0x3:
7532 if (op == 0x1)
7533 return arm_copy_bx_blx_reg (gdbarch, insn,
7534 regs, dsc); /* blx register. */
7535 else
7536 return arm_copy_undef (gdbarch, insn, dsc);
7537
7538 case 0x5:
7539 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7540
7541 case 0x7:
7542 if (op == 0x1)
7543 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7544 else if (op == 0x3)
7545 /* Not really supported. */
7546 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7547 /* Fall through. */
7548
7549 default:
7550 return arm_copy_undef (gdbarch, insn, dsc);
7551 }
7552 }
7553
7554 static int
7555 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7556 struct regcache *regs,
7557 arm_displaced_step_copy_insn_closure *dsc)
7558 {
7559 if (bit (insn, 25))
7560 switch (bits (insn, 20, 24))
7561 {
7562 case 0x10:
7563 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7564
7565 case 0x14:
7566 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7567
7568 case 0x12: case 0x16:
7569 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7570
7571 default:
7572 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7573 }
7574 else
7575 {
7576 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7577
7578 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7579 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7580 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7581 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7582 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7583 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7584 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7585 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7586 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7587 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7588 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7589 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7590 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7591 /* 2nd arg means "unprivileged". */
7592 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7593 dsc);
7594 }
7595
7596 /* Should be unreachable. */
7597 return 1;
7598 }
7599
7600 static int
7601 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7602 struct regcache *regs,
7603 arm_displaced_step_copy_insn_closure *dsc)
7604 {
7605 int a = bit (insn, 25), b = bit (insn, 4);
7606 uint32_t op1 = bits (insn, 20, 24);
7607
7608 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7609 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7610 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7611 else if ((!a && (op1 & 0x17) == 0x02)
7612 || (a && (op1 & 0x17) == 0x02 && !b))
7613 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7614 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7615 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7616 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7617 else if ((!a && (op1 & 0x17) == 0x03)
7618 || (a && (op1 & 0x17) == 0x03 && !b))
7619 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7620 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7621 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7622 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7623 else if ((!a && (op1 & 0x17) == 0x06)
7624 || (a && (op1 & 0x17) == 0x06 && !b))
7625 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7626 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7627 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7628 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7629 else if ((!a && (op1 & 0x17) == 0x07)
7630 || (a && (op1 & 0x17) == 0x07 && !b))
7631 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7632
7633 /* Should be unreachable. */
7634 return 1;
7635 }
7636
7637 static int
7638 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7639 arm_displaced_step_copy_insn_closure *dsc)
7640 {
7641 switch (bits (insn, 20, 24))
7642 {
7643 case 0x00: case 0x01: case 0x02: case 0x03:
7644 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7645
7646 case 0x04: case 0x05: case 0x06: case 0x07:
7647 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7648
7649 case 0x08: case 0x09: case 0x0a: case 0x0b:
7650 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7651 return arm_copy_unmodified (gdbarch, insn,
7652 "decode/pack/unpack/saturate/reverse", dsc);
7653
7654 case 0x18:
7655 if (bits (insn, 5, 7) == 0) /* op2. */
7656 {
7657 if (bits (insn, 12, 15) == 0xf)
7658 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7659 else
7660 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7661 }
7662 else
7663 return arm_copy_undef (gdbarch, insn, dsc);
7664
7665 case 0x1a: case 0x1b:
7666 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7667 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7668 else
7669 return arm_copy_undef (gdbarch, insn, dsc);
7670
7671 case 0x1c: case 0x1d:
7672 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7673 {
7674 if (bits (insn, 0, 3) == 0xf)
7675 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7676 else
7677 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7678 }
7679 else
7680 return arm_copy_undef (gdbarch, insn, dsc);
7681
7682 case 0x1e: case 0x1f:
7683 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7684 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7685 else
7686 return arm_copy_undef (gdbarch, insn, dsc);
7687 }
7688
7689 /* Should be unreachable. */
7690 return 1;
7691 }
7692
7693 static int
7694 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7695 struct regcache *regs,
7696 arm_displaced_step_copy_insn_closure *dsc)
7697 {
7698 if (bit (insn, 25))
7699 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7700 else
7701 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7702 }
7703
7704 static int
7705 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7706 struct regcache *regs,
7707 arm_displaced_step_copy_insn_closure *dsc)
7708 {
7709 unsigned int opcode = bits (insn, 20, 24);
7710
7711 switch (opcode)
7712 {
7713 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7714 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7715
7716 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7717 case 0x12: case 0x16:
7718 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7719
7720 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7721 case 0x13: case 0x17:
7722 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7723
7724 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7725 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7726 /* Note: no writeback for these instructions. Bit 25 will always be
7727 zero though (via caller), so the following works OK. */
7728 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7729 }
7730
7731 /* Should be unreachable. */
7732 return 1;
7733 }
7734
7735 /* Decode shifted register instructions. */
7736
7737 static int
7738 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7739 uint16_t insn2, struct regcache *regs,
7740 arm_displaced_step_copy_insn_closure *dsc)
7741 {
7742   /* PC is only allowed to be used in the MOV instruction.  */
7743
7744 unsigned int op = bits (insn1, 5, 8);
7745 unsigned int rn = bits (insn1, 0, 3);
7746
7747 if (op == 0x2 && rn == 0xf) /* MOV */
7748 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7749 else
7750 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7751 "dp (shift reg)", dsc);
7752 }
7753
7754
7755 /* Decode extension register load/store. Exactly the same as
7756 arm_decode_ext_reg_ld_st. */
7757
7758 static int
7759 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7760 uint16_t insn2, struct regcache *regs,
7761 arm_displaced_step_copy_insn_closure *dsc)
7762 {
7763 unsigned int opcode = bits (insn1, 4, 8);
7764
7765 switch (opcode)
7766 {
7767 case 0x04: case 0x05:
7768 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7769 "vfp/neon vmov", dsc);
7770
7771 case 0x08: case 0x0c: /* 01x00 */
7772 case 0x0a: case 0x0e: /* 01x10 */
7773 case 0x12: case 0x16: /* 10x10 */
7774 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7775 "vfp/neon vstm/vpush", dsc);
7776
7777 case 0x09: case 0x0d: /* 01x01 */
7778 case 0x0b: case 0x0f: /* 01x11 */
7779 case 0x13: case 0x17: /* 10x11 */
7780 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7781 "vfp/neon vldm/vpop", dsc);
7782
7783 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7784 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7785 "vstr", dsc);
7786 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7787 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7788 }
7789
7790 /* Should be unreachable. */
7791 return 1;
7792 }
7793
7794 static int
7795 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7796 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7797 {
7798 unsigned int op1 = bits (insn, 20, 25);
7799 int op = bit (insn, 4);
7800 unsigned int coproc = bits (insn, 8, 11);
7801
7802 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7803 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7804 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7805 && (coproc & 0xe) != 0xa)
7806 /* stc/stc2. */
7807 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7808 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7809 && (coproc & 0xe) != 0xa)
7810 /* ldc/ldc2 imm/lit. */
7811 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7812 else if ((op1 & 0x3e) == 0x00)
7813 return arm_copy_undef (gdbarch, insn, dsc);
7814 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7815 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7816 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7817 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7818 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7819 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7820 else if ((op1 & 0x30) == 0x20 && !op)
7821 {
7822 if ((coproc & 0xe) == 0xa)
7823 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7824 else
7825 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7826 }
7827 else if ((op1 & 0x30) == 0x20 && op)
7828 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7829 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7830 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7831 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7832 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7833 else if ((op1 & 0x30) == 0x30)
7834 return arm_copy_svc (gdbarch, insn, regs, dsc);
7835 else
7836 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7837 }
7838
7839 static int
7840 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7841 uint16_t insn2, struct regcache *regs,
7842 arm_displaced_step_copy_insn_closure *dsc)
7843 {
7844 unsigned int coproc = bits (insn2, 8, 11);
7845 unsigned int bit_5_8 = bits (insn1, 5, 8);
7846 unsigned int bit_9 = bit (insn1, 9);
7847 unsigned int bit_4 = bit (insn1, 4);
7848
7849 if (bit_9 == 0)
7850 {
7851 if (bit_5_8 == 2)
7852 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7853 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7854 dsc);
7855 else if (bit_5_8 == 0) /* UNDEFINED. */
7856 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7857 else
7858 {
7859 	  /* Coproc is 101x: SIMD/VFP, extension registers load/store.  */
7860 if ((coproc & 0xe) == 0xa)
7861 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7862 dsc);
7863 else /* coproc is not 101x. */
7864 {
7865 if (bit_4 == 0) /* STC/STC2. */
7866 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7867 "stc/stc2", dsc);
7868 else /* LDC/LDC2 {literal, immediate}. */
7869 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7870 regs, dsc);
7871 }
7872 }
7873 }
7874 else
7875 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7876
7877 return 0;
7878 }
7879
7880 static void
7881 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7882 arm_displaced_step_copy_insn_closure *dsc, int rd)
7883 {
7884 /* ADR Rd, #imm
7885
7886 Rewrite as:
7887
7888 Preparation: Rd <- PC
7889 Insn: ADD Rd, #imm
7890 Cleanup: Null.
7891 */
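  /* In other words (a sketch of the intent): the copied ADD or SUB executed
     in the scratch pad applies the immediate to the saved PC value written
     into Rd just below, reproducing the PC-relative computation the original
     ADR would have performed at its original address.  */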
7892
7893 /* Rd <- PC */
7894 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7895 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7896 }
7897
7898 static int
7899 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7900 arm_displaced_step_copy_insn_closure *dsc,
7901 int rd, unsigned int imm)
7902 {
7903
7904 /* Encoding T2: ADDS Rd, #imm */
7905 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7906
7907 install_pc_relative (gdbarch, regs, dsc, rd);
7908
7909 return 0;
7910 }
7911
7912 static int
7913 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7914 struct regcache *regs,
7915 arm_displaced_step_copy_insn_closure *dsc)
7916 {
7917 unsigned int rd = bits (insn, 8, 10);
7918 unsigned int imm8 = bits (insn, 0, 7);
7919
7920 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7921 rd, imm8, insn);
7922
7923 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7924 }
7925
7926 static int
7927 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7928 uint16_t insn2, struct regcache *regs,
7929 arm_displaced_step_copy_insn_closure *dsc)
7930 {
7931 unsigned int rd = bits (insn2, 8, 11);
7932   /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7933      extract the raw immediate encoding rather than computing the immediate
7934      value.  When generating the ADD or SUB instruction, we can then simply
7935      OR the immediate into the encoding.  */
7936 unsigned int imm_3_8 = insn2 & 0x70ff;
7937 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7938
7939 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7940 rd, imm_i, imm_3_8, insn1, insn2);
7941
7942   if (bit (insn1, 7)) /* ADR encoding T2 (subtract form).  */
7943 {
7944       /* Rewrite as SUB Rd, Rd, #imm (encoding T3).  */
7945 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7946 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7947 }
7948   else /* ADR encoding T3 (add form).  */
7949 {
7950       /* Rewrite as ADD Rd, Rd, #imm (encoding T3).  */
7951 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7952 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7953 }
7954 dsc->numinsns = 2;
7955
7956 install_pc_relative (gdbarch, regs, dsc, rd);
7957
7958 return 0;
7959 }
7960
7961 static int
7962 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7963 struct regcache *regs,
7964 arm_displaced_step_copy_insn_closure *dsc)
7965 {
7966 unsigned int rt = bits (insn1, 8, 10);
7967 unsigned int pc;
7968 int imm8 = (bits (insn1, 0, 7) << 2);
7969
7970 /* LDR Rd, #imm8
7971
7972      Rewrite as:
7973
7974 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7975
7976 Insn: LDR R0, [R2, R3];
7977 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
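  /* Worked example (illustrative only): for "ldr r5, [pc, #16]" the scratch
     sequence is a single "ldr r0, [r2, r3]" with r2 holding Align(PC,4) of
     the original instruction and r3 holding 16; the cleanup then moves the
     loaded value from r0 into r5 and restores r0, r2 and r3 from the saved
     temporaries.  */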
7978
7979 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7980
7981 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7982 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7983 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7984 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7985 /* The assembler calculates the required value of the offset from the
7986 Align(PC,4) value of this instruction to the label. */
7987 pc = pc & 0xfffffffc;
7988
7989 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7990 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7991
7992 dsc->rd = rt;
7993 dsc->u.ldst.xfersize = 4;
7994 dsc->u.ldst.rn = 0;
7995 dsc->u.ldst.immed = 0;
7996 dsc->u.ldst.writeback = 0;
7997 dsc->u.ldst.restore_r4 = 0;
7998
7999 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8000
8001 dsc->cleanup = &cleanup_load;
8002
8003 return 0;
8004 }
8005
8006 /* Copy Thumb cbnz/cbz instruction. */
8007
8008 static int
8009 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8010 struct regcache *regs,
8011 arm_displaced_step_copy_insn_closure *dsc)
8012 {
8013 int non_zero = bit (insn1, 11);
8014 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8015 CORE_ADDR from = dsc->insn_addr;
8016 int rn = bits (insn1, 0, 2);
8017 int rn_val = displaced_read_reg (regs, dsc, rn);
8018
8019 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8020   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
8021      true, set it to INST_AL so that cleanup_branch knows the branch is
8022      taken; otherwise leave it false and cleanup_branch will do nothing.  */
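  /* Example (illustrative): for a CBZ with a decoded branch offset of 16
     whose register currently reads zero, the branch is taken, so the scratch
     copy is just a NOP and the cleanup writes FROM + 4 + 16 into the PC; had
     the register been non-zero, the cleanup would resume at FROM + 2.  */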
8023 if (dsc->u.branch.cond)
8024 {
8025 dsc->u.branch.cond = INST_AL;
8026 dsc->u.branch.dest = from + 4 + imm5;
8027 }
8028 else
8029 dsc->u.branch.dest = from + 2;
8030
8031 dsc->u.branch.link = 0;
8032 dsc->u.branch.exchange = 0;
8033
8034 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8035 non_zero ? "cbnz" : "cbz",
8036 rn, rn_val, insn1, dsc->u.branch.dest);
8037
8038 dsc->modinsn[0] = THUMB_NOP;
8039
8040 dsc->cleanup = &cleanup_branch;
8041 return 0;
8042 }
8043
8044 /* Copy Table Branch Byte/Halfword */
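/* TBB reads a byte offset from a table at Rn + Rm, TBH a halfword offset
   from Rn + 2*Rm, and both branch forward by twice that value.  Rather than
   executing the table branch out of line, the routine below reads the table
   entry itself and lets cleanup_branch write the resulting destination
   (instruction address + 4 + 2*offset) into the PC.  */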
8045 static int
8046 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8047 uint16_t insn2, struct regcache *regs,
8048 arm_displaced_step_copy_insn_closure *dsc)
8049 {
8050 ULONGEST rn_val, rm_val;
8051 int is_tbh = bit (insn2, 4);
8052 CORE_ADDR halfwords = 0;
8053 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8054
8055 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8056 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8057
8058 if (is_tbh)
8059 {
8060 gdb_byte buf[2];
8061
8062 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8063 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8064 }
8065 else
8066 {
8067 gdb_byte buf[1];
8068
8069 target_read_memory (rn_val + rm_val, buf, 1);
8070 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8071 }
8072
8073 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
8074 is_tbh ? "tbh" : "tbb",
8075 (unsigned int) rn_val, (unsigned int) rm_val,
8076 (unsigned int) halfwords);
8077
8078 dsc->u.branch.cond = INST_AL;
8079 dsc->u.branch.link = 0;
8080 dsc->u.branch.exchange = 0;
8081 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8082
8083 dsc->cleanup = &cleanup_branch;
8084
8085 return 0;
8086 }
8087
8088 static void
8089 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8090 arm_displaced_step_copy_insn_closure *dsc)
8091 {
8092 /* PC <- r7 */
8093 int val = displaced_read_reg (regs, dsc, 7);
8094 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8095
8096 /* r7 <- r8 */
8097 val = displaced_read_reg (regs, dsc, 8);
8098 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8099
8100 /* r8 <- tmp[0] */
8101 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8102
8103 }
8104
8105 static int
8106 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
8107 struct regcache *regs,
8108 arm_displaced_step_copy_insn_closure *dsc)
8109 {
8110 dsc->u.block.regmask = insn1 & 0x00ff;
8111
8112 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8113 to :
8114
8115 (1) register list is full, that is, r0-r7 are used.
8116 Prepare: tmp[0] <- r8
8117
8118 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8119 MOV r8, r7; Move value of r7 to r8;
8120 POP {r7}; Store PC value into r7.
8121
8122 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8123
8124 (2) register list is not full, supposing there are N registers in
8125 register list (except PC, 0 <= N <= 7).
8126 Prepare: for each i, 0 - N, tmp[i] <- ri.
8127
8128 POP {r0, r1, ...., rN};
8129
8130 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8131 from tmp[] properly.
8132 */
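  /* Worked example for case (2) (illustrative): "pop {r0, r2, pc}" has two
     registers besides the PC, so the scratch copy becomes "pop {r0, r1, r2}";
     cleanup_block_load_pc then assigns the three popped values, in order, to
     r0, r2 and the PC (the PC bit is put back into the recorded register mask
     below), and the clobbered low registers are restored from the saved
     temporaries where needed.  */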
8133 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
8134 dsc->u.block.regmask, insn1);
8135
8136 if (dsc->u.block.regmask == 0xff)
8137 {
8138 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8139
8140 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8141 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8142 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8143
8144 dsc->numinsns = 3;
8145 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8146 }
8147 else
8148 {
8149 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
8150 unsigned int i;
8151 unsigned int new_regmask;
8152
8153 for (i = 0; i < num_in_list + 1; i++)
8154 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8155
8156 new_regmask = (1 << (num_in_list + 1)) - 1;
8157
8158 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
8159 "modified list %.4x",
8160 (int) dsc->u.block.regmask, new_regmask);
8161
8162 dsc->u.block.regmask |= 0x8000;
8163 dsc->u.block.writeback = 0;
8164 dsc->u.block.cond = INST_AL;
8165
8166 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8167
8168 dsc->cleanup = &cleanup_block_load_pc;
8169 }
8170
8171 return 0;
8172 }
8173
8174 static void
8175 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8176 struct regcache *regs,
8177 arm_displaced_step_copy_insn_closure *dsc)
8178 {
8179 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8180 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8181 int err = 0;
8182
8183 /* 16-bit thumb instructions. */
8184 switch (op_bit_12_15)
8185 {
8186     /* Shift (immediate), add, subtract, move and compare.  */
8187 case 0: case 1: case 2: case 3:
8188 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8189 "shift/add/sub/mov/cmp",
8190 dsc);
8191 break;
8192 case 4:
8193 switch (op_bit_10_11)
8194 {
8195 case 0: /* Data-processing */
8196 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8197 "data-processing",
8198 dsc);
8199 break;
8200 case 1: /* Special data instructions and branch and exchange. */
8201 {
8202 unsigned short op = bits (insn1, 7, 9);
8203 if (op == 6 || op == 7) /* BX or BLX */
8204 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8205 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8206 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8207 else
8208 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8209 dsc);
8210 }
8211 break;
8212 default: /* LDR (literal) */
8213 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8214 }
8215 break;
8216 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8217 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8218 break;
8219 case 10:
8220 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8221 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8222 else /* Generate SP-relative address */
8223 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8224 break;
8225 case 11: /* Misc 16-bit instructions */
8226 {
8227 switch (bits (insn1, 8, 11))
8228 {
8229 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8230 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8231 break;
8232 case 12: case 13: /* POP */
8233 if (bit (insn1, 8)) /* PC is in register list. */
8234 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8235 else
8236 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8237 break;
8238 case 15: /* If-Then, and hints */
8239 if (bits (insn1, 0, 3))
8240 	      /* If-Then makes up to four following instructions conditional.
8241 		 The IT instruction itself is not conditional, so handle it
8242 		 as an ordinary unmodified instruction.  */
8243 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8244 dsc);
8245 else
8246 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8247 break;
8248 default:
8249 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8250 }
8251 }
8252 break;
8253 case 12:
8254 if (op_bit_10_11 < 2) /* Store multiple registers */
8255 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8256 else /* Load multiple registers */
8257 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8258 break;
8259 case 13: /* Conditional branch and supervisor call */
8260 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8261 err = thumb_copy_b (gdbarch, insn1, dsc);
8262 else
8263 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8264 break;
8265 case 14: /* Unconditional branch */
8266 err = thumb_copy_b (gdbarch, insn1, dsc);
8267 break;
8268 default:
8269 err = 1;
8270 }
8271
8272 if (err)
8273 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error"));
8274 }
8275
8276 static int
8277 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8278 uint16_t insn1, uint16_t insn2,
8279 struct regcache *regs,
8280 arm_displaced_step_copy_insn_closure *dsc)
8281 {
8282 int rt = bits (insn2, 12, 15);
8283 int rn = bits (insn1, 0, 3);
8284 int op1 = bits (insn1, 7, 8);
8285
8286 switch (bits (insn1, 5, 6))
8287 {
8288 case 0: /* Load byte and memory hints */
8289 if (rt == 0xf) /* PLD/PLI */
8290 {
8291 if (rn == 0xf)
8292 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8293 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8294 else
8295 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8296 "pli/pld", dsc);
8297 }
8298 else
8299 {
8300 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8301 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8302 1);
8303 else
8304 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8305 "ldrb{reg, immediate}/ldrbt",
8306 dsc);
8307 }
8308
8309 break;
8310 case 1: /* Load halfword and memory hints. */
8311 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8312 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8313 "pld/unalloc memhint", dsc);
8314 else
8315 {
8316 if (rn == 0xf)
8317 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8318 2);
8319 else
8320 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8321 "ldrh/ldrht", dsc);
8322 }
8323 break;
8324 case 2: /* Load word */
8325 {
8326 int insn2_bit_8_11 = bits (insn2, 8, 11);
8327
8328 if (rn == 0xf)
8329 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8330 else if (op1 == 0x1) /* Encoding T3 */
8331 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8332 0, 1);
8333 else /* op1 == 0x0 */
8334 {
8335 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8336 /* LDR (immediate) */
8337 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8338 dsc, bit (insn2, 8), 1);
8339 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8340 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8341 "ldrt", dsc);
8342 else
8343 /* LDR (register) */
8344 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8345 dsc, 0, 0);
8346 }
8347 break;
8348 }
8349 default:
8350 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8351 break;
8352 }
8353 return 0;
8354 }
8355
8356 static void
8357 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8358 uint16_t insn2, struct regcache *regs,
8359 arm_displaced_step_copy_insn_closure *dsc)
8360 {
8361 int err = 0;
8362 unsigned short op = bit (insn2, 15);
8363 unsigned int op1 = bits (insn1, 11, 12);
8364
8365 switch (op1)
8366 {
8367 case 1:
8368 {
8369 switch (bits (insn1, 9, 10))
8370 {
8371 case 0:
8372 if (bit (insn1, 6))
8373 {
8374 /* Load/store {dual, exclusive}, table branch. */
8375 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8376 && bits (insn2, 5, 7) == 0)
8377 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8378 dsc);
8379 else
8380 		      /* PC is not allowed to be used in load/store {dual,
8381 			 exclusive} instructions.  */
8382 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8383 "load/store dual/ex", dsc);
8384 }
8385 else /* load/store multiple */
8386 {
8387 switch (bits (insn1, 7, 8))
8388 {
8389 case 0: case 3: /* SRS, RFE */
8390 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8391 "srs/rfe", dsc);
8392 break;
8393 case 1: case 2: /* LDM/STM/PUSH/POP */
8394 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8395 break;
8396 }
8397 }
8398 break;
8399
8400 case 1:
8401 /* Data-processing (shift register). */
8402 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8403 dsc);
8404 break;
8405 default: /* Coprocessor instructions. */
8406 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8407 break;
8408 }
8409 break;
8410 }
8411 case 2: /* op1 = 2 */
8412 if (op) /* Branch and misc control. */
8413 {
8414 if (bit (insn2, 14) /* BLX/BL */
8415 || bit (insn2, 12) /* Unconditional branch */
8416 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8417 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8418 else
8419 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8420 "misc ctrl", dsc);
8421 }
8422 else
8423 {
8424 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8425 {
8426 int dp_op = bits (insn1, 4, 8);
8427 int rn = bits (insn1, 0, 3);
8428 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8429 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8430 regs, dsc);
8431 else
8432 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8433 "dp/pb", dsc);
8434 }
8435 else /* Data processing (modified immediate) */
8436 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8437 "dp/mi", dsc);
8438 }
8439 break;
8440 case 3: /* op1 = 3 */
8441 switch (bits (insn1, 9, 10))
8442 {
8443 case 0:
8444 if (bit (insn1, 4))
8445 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8446 regs, dsc);
8447 else /* NEON Load/Store and Store single data item */
8448 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8449 "neon elt/struct load/store",
8450 dsc);
8451 break;
8452 case 1: /* op1 = 3, bits (9, 10) == 1 */
8453 switch (bits (insn1, 7, 8))
8454 {
8455 case 0: case 1: /* Data processing (register) */
8456 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8457 "dp(reg)", dsc);
8458 break;
8459 case 2: /* Multiply and absolute difference */
8460 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8461 "mul/mua/diff", dsc);
8462 break;
8463 case 3: /* Long multiply and divide */
8464 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8465 "lmul/lmua", dsc);
8466 break;
8467 }
8468 break;
8469 default: /* Coprocessor instructions */
8470 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8471 break;
8472 }
8473 break;
8474 default:
8475 err = 1;
8476 }
8477
8478 if (err)
8479 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error"));
8480
8481 }
8482
8483 static void
8484 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8485 struct regcache *regs,
8486 arm_displaced_step_copy_insn_closure *dsc)
8487 {
8488 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8489 uint16_t insn1
8490 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8491
8492 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8493 insn1, (unsigned long) from);
8494
8495 dsc->is_thumb = 1;
8496 dsc->insn_size = thumb_insn_size (insn1);
8497 if (thumb_insn_size (insn1) == 4)
8498 {
8499 uint16_t insn2
8500 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8501 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8502 }
8503 else
8504 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8505 }
8506
8507 void
8508 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8509 CORE_ADDR to, struct regcache *regs,
8510 arm_displaced_step_copy_insn_closure *dsc)
8511 {
8512 int err = 0;
8513 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8514 uint32_t insn;
8515
8516 /* Most displaced instructions use a 1-instruction scratch space, so set this
8517 here and override below if/when necessary. */
8518 dsc->numinsns = 1;
8519 dsc->insn_addr = from;
8520 dsc->scratch_base = to;
8521 dsc->cleanup = NULL;
8522 dsc->wrote_to_pc = 0;
8523
8524 if (!displaced_in_arm_mode (regs))
8525 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8526
8527 dsc->is_thumb = 0;
8528 dsc->insn_size = 4;
8529 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8530 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8531 (unsigned long) insn, (unsigned long) from);
8532
8533 if ((insn & 0xf0000000) == 0xf0000000)
8534 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
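  /* The switch key below concatenates bits [27:25] of the instruction
     (as bits 3..1 of the key) with bit 4 (as bit 0), mirroring the
     top-level ARM instruction-class split in the architecture manual.  */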
8535 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8536 {
8537 case 0x0: case 0x1: case 0x2: case 0x3:
8538 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8539 break;
8540
8541 case 0x4: case 0x5: case 0x6:
8542 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8543 break;
8544
8545 case 0x7:
8546 err = arm_decode_media (gdbarch, insn, dsc);
8547 break;
8548
8549 case 0x8: case 0x9: case 0xa: case 0xb:
8550 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8551 break;
8552
8553 case 0xc: case 0xd: case 0xe: case 0xf:
8554 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8555 break;
8556 }
8557
8558 if (err)
8559 internal_error (_("arm_process_displaced_insn: Instruction decode error"));
8560 }
8561
8562 /* Actually set up the scratch space for a displaced instruction. */
8563
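/* A sketch of the resulting scratch-area layout (following the fields set up
   by the copy routines above):

       TO + 0                : modinsn[0]
       ...                   : modinsn[numinsns - 1]
       TO + numinsns * size  : ARM or Thumb breakpoint instruction

   where SIZE is 2 for a Thumb copy and 4 for an ARM copy.  */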
8564 void
8565 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8566 CORE_ADDR to,
8567 arm_displaced_step_copy_insn_closure *dsc)
8568 {
8569 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8570 unsigned int i, len, offset;
8571 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8572 int size = dsc->is_thumb? 2 : 4;
8573 const gdb_byte *bkp_insn;
8574
8575 offset = 0;
8576 /* Poke modified instruction(s). */
8577 for (i = 0; i < dsc->numinsns; i++)
8578 {
8579 if (size == 4)
8580 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8581 dsc->modinsn[i], (unsigned long) to + offset);
8582 else if (size == 2)
8583 displaced_debug_printf ("writing insn %.4x at %.8lx",
8584 (unsigned short) dsc->modinsn[i],
8585 (unsigned long) to + offset);
8586
8587 write_memory_unsigned_integer (to + offset, size,
8588 byte_order_for_code,
8589 dsc->modinsn[i]);
8590 offset += size;
8591 }
8592
8593 /* Choose the correct breakpoint instruction. */
8594 if (dsc->is_thumb)
8595 {
8596 bkp_insn = tdep->thumb_breakpoint;
8597 len = tdep->thumb_breakpoint_size;
8598 }
8599 else
8600 {
8601 bkp_insn = tdep->arm_breakpoint;
8602 len = tdep->arm_breakpoint_size;
8603 }
8604
8605 /* Put breakpoint afterwards. */
8606 write_memory (to + offset, bkp_insn, len);
8607
8608 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8609 paddress (gdbarch, to));
8610 }
8611
8612 /* Entry point for cleaning things up after a displaced instruction has been
8613 single-stepped. */
8614
8615 void
8616 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8617 struct displaced_step_copy_insn_closure *dsc_,
8618 CORE_ADDR from, CORE_ADDR to,
8619 struct regcache *regs)
8620 {
8621 arm_displaced_step_copy_insn_closure *dsc
8622 = (arm_displaced_step_copy_insn_closure *) dsc_;
8623
8624 if (dsc->cleanup)
8625 dsc->cleanup (gdbarch, regs, dsc);
8626
8627 if (!dsc->wrote_to_pc)
8628 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8629 dsc->insn_addr + dsc->insn_size);
8630
8631 }
8632
8633 #include "bfd-in2.h"
8634 #include "libcoff.h"
8635
8636 static int
8637 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8638 {
8639 gdb_disassemble_info *di
8640 = static_cast<gdb_disassemble_info *> (info->application_data);
8641 struct gdbarch *gdbarch = di->arch ();
8642
8643 if (arm_pc_is_thumb (gdbarch, memaddr))
8644 {
8645 static asymbol *asym;
8646 static combined_entry_type ce;
8647 static struct coff_symbol_struct csym;
8648 static struct bfd fake_bfd;
8649 static bfd_target fake_target;
8650
8651 if (csym.native == NULL)
8652 {
8653 /* Create a fake symbol vector containing a Thumb symbol.
8654 This is solely so that the code in print_insn_little_arm()
8655 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8656 the presence of a Thumb symbol and switch to decoding
8657 Thumb instructions. */
8658
8659 fake_target.flavour = bfd_target_coff_flavour;
8660 fake_bfd.xvec = &fake_target;
8661 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8662 csym.native = &ce;
8663 csym.symbol.the_bfd = &fake_bfd;
8664 csym.symbol.name = "fake";
8665 asym = (asymbol *) & csym;
8666 }
8667
8668 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8669 info->symbols = &asym;
8670 }
8671 else
8672 info->symbols = NULL;
8673
8674   /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8675      accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
8676      opcodes/arm-dis.c:print_insn would reset info->mach, which would
8677      trigger the assert on the mismatch between info->mach and
8678      bfd_get_mach (current_program_space->exec_bfd ()) in
8679      default_print_insn.  */
8680 if (current_program_space->exec_bfd () != NULL
8681 && (current_program_space->exec_bfd ()->arch_info
8682 == gdbarch_bfd_arch_info (gdbarch)))
8683 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8684
8685 return default_print_insn (memaddr, info);
8686 }
8687
8688 /* The following define instruction sequences that will cause ARM
8689 cpu's to take an undefined instruction trap. These are used to
8690 signal a breakpoint to GDB.
8691
8692 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8693 modes. A different instruction is required for each mode. The ARM
8694 cpu's can also be big or little endian. Thus four different
8695 instructions are needed to support all cases.
8696
8697 Note: ARMv4 defines several new instructions that will take the
8698 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8699 not in fact add the new instructions. The new undefined
8700 instructions in ARMv4 are all instructions that had no defined
8701 behaviour in earlier chips. There is no guarantee that they will
8702    raise an exception, but may instead be treated as NOPs.  In practice,
8703    it may only be safe to rely on instructions matching:
8704
8705 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8706 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8707 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8708
8709    Even this may only be true if the condition predicate is true.  The
8710 following use a condition predicate of ALWAYS so it is always TRUE.
8711
8712 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8713 and NetBSD all use a software interrupt rather than an undefined
8714    instruction to force a trap.  This can be handled by the
8715 abi-specific code during establishment of the gdbarch vector. */
8716
8717 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8718 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8719 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8720 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
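/* For instance (explanatory note), the little-endian ARM breakpoint bytes
   above form the word 0xe7ffdefe, which has an ALWAYS condition, bits [27:25]
   equal to 011 and bit 4 set, i.e. it matches the undefined-instruction
   pattern described in the comment above.  */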
8721
8722 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8723 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8724 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8725 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8726
8727 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8728
8729 static int
8730 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8731 {
8732 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8733 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8734
8735 if (arm_pc_is_thumb (gdbarch, *pcptr))
8736 {
8737 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8738
8739 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8740 check whether we are replacing a 32-bit instruction. */
8741 if (tdep->thumb2_breakpoint != NULL)
8742 {
8743 gdb_byte buf[2];
8744
8745 if (target_read_memory (*pcptr, buf, 2) == 0)
8746 {
8747 unsigned short inst1;
8748
8749 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8750 if (thumb_insn_size (inst1) == 4)
8751 return ARM_BP_KIND_THUMB2;
8752 }
8753 }
8754
8755 return ARM_BP_KIND_THUMB;
8756 }
8757 else
8758 return ARM_BP_KIND_ARM;
8759
8760 }
8761
8762 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8763
8764 static const gdb_byte *
8765 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8766 {
8767 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8768
8769 switch (kind)
8770 {
8771 case ARM_BP_KIND_ARM:
8772 *size = tdep->arm_breakpoint_size;
8773 return tdep->arm_breakpoint;
8774 case ARM_BP_KIND_THUMB:
8775 *size = tdep->thumb_breakpoint_size;
8776 return tdep->thumb_breakpoint;
8777 case ARM_BP_KIND_THUMB2:
8778 *size = tdep->thumb2_breakpoint_size;
8779 return tdep->thumb2_breakpoint;
8780 default:
8781 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8782 }
8783 }
8784
8785 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8786
8787 static int
8788 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8789 struct regcache *regcache,
8790 CORE_ADDR *pcptr)
8791 {
8792 gdb_byte buf[4];
8793
8794   /* Check that the memory pointed to by PC is readable.  */
8795 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8796 {
8797 struct arm_get_next_pcs next_pcs_ctx;
8798
8799 arm_get_next_pcs_ctor (&next_pcs_ctx,
8800 &arm_get_next_pcs_ops,
8801 gdbarch_byte_order (gdbarch),
8802 gdbarch_byte_order_for_code (gdbarch),
8803 0,
8804 regcache);
8805
8806 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8807
8808       /* If *PCPTR matches one of the next PCs computed by the software
8809 	 single-step logic, use that destination address to determine the
8810 	 Thumb mode (and hence the breakpoint kind).  */
8811 for (CORE_ADDR pc : next_pcs)
8812 {
8813 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8814 {
8815 if (IS_THUMB_ADDR (pc))
8816 {
8817 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8818 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8819 }
8820 else
8821 return ARM_BP_KIND_ARM;
8822 }
8823 }
8824 }
8825
8826 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8827 }
8828
8829 /* Extract from the register cache REGS a function return value of
8830    type TYPE, and copy that, in virtual format, into
8831    VALBUF.  */
8832
8833 static void
8834 arm_extract_return_value (struct type *type, struct regcache *regs,
8835 gdb_byte *valbuf)
8836 {
8837 struct gdbarch *gdbarch = regs->arch ();
8838 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8839 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8840
8841 if (TYPE_CODE_FLT == type->code ())
8842 {
8843 switch (tdep->fp_model)
8844 {
8845 case ARM_FLOAT_FPA:
8846 {
8847 /* The value is in register F0 in internal format. We need to
8848 extract the raw value and then convert it to the desired
8849 internal type. */
8850 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8851
8852 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8853 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8854 valbuf, type);
8855 }
8856 break;
8857
8858 case ARM_FLOAT_SOFT_FPA:
8859 case ARM_FLOAT_SOFT_VFP:
8860 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8861 not using the VFP ABI code. */
8862 case ARM_FLOAT_VFP:
8863 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8864 if (type->length () > 4)
8865 regs->cooked_read (ARM_A1_REGNUM + 1,
8866 valbuf + ARM_INT_REGISTER_SIZE);
8867 break;
8868
8869 default:
8870 internal_error (_("arm_extract_return_value: "
8871 "Floating point model not supported"));
8872 break;
8873 }
8874 }
8875 else if (type->code () == TYPE_CODE_INT
8876 || type->code () == TYPE_CODE_CHAR
8877 || type->code () == TYPE_CODE_BOOL
8878 || type->code () == TYPE_CODE_PTR
8879 || TYPE_IS_REFERENCE (type)
8880 || type->code () == TYPE_CODE_ENUM
8881 || is_fixed_point_type (type))
8882 {
8883 /* If the type is a plain integer, then the access is
8884 straight-forward. Otherwise we have to play around a bit
8885 more. */
8886 int len = type->length ();
8887 int regno = ARM_A1_REGNUM;
8888 ULONGEST tmp;
8889
8890 while (len > 0)
8891 {
8892 /* By using store_unsigned_integer we avoid having to do
8893 anything special for small big-endian values. */
8894 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8895 store_unsigned_integer (valbuf,
8896 (len > ARM_INT_REGISTER_SIZE
8897 ? ARM_INT_REGISTER_SIZE : len),
8898 byte_order, tmp);
8899 len -= ARM_INT_REGISTER_SIZE;
8900 valbuf += ARM_INT_REGISTER_SIZE;
8901 }
8902 }
8903 else
8904 {
8905 /* For a structure or union the behaviour is as if the value had
8906 been stored to word-aligned memory and then loaded into
8907 registers with 32-bit load instruction(s). */
8908 int len = type->length ();
8909 int regno = ARM_A1_REGNUM;
8910 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8911
8912 while (len > 0)
8913 {
8914 regs->cooked_read (regno++, tmpbuf);
8915 memcpy (valbuf, tmpbuf,
8916 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8917 len -= ARM_INT_REGISTER_SIZE;
8918 valbuf += ARM_INT_REGISTER_SIZE;
8919 }
8920 }
8921 }
8922
8923
8924 /* Will a function return an aggregate type in memory or in a
8925 register? Return 0 if an aggregate type can be returned in a
8926 register, 1 if it must be returned in memory. */
8927
8928 static int
8929 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8930 {
8931 enum type_code code;
8932
8933 type = check_typedef (type);
8934
8935 /* Simple, non-aggregate types (i.e. not including vectors and
8936 complex) are always returned in a register (or registers). */
8937 code = type->code ();
8938 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8939 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8940 return 0;
8941
8942 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8943 {
8944 /* Vector values should be returned using ARM registers if they
8945 are not over 16 bytes. */
8946 return (type->length () > 16);
8947 }
8948
8949 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8950 if (tdep->arm_abi != ARM_ABI_APCS)
8951 {
8952 /* The AAPCS says all aggregates not larger than a word are returned
8953 in a register. */
8954 if (type->length () <= ARM_INT_REGISTER_SIZE
8955 && language_pass_by_reference (type).trivially_copyable)
8956 return 0;
8957
8958 return 1;
8959 }
8960 else
8961 {
8962 int nRc;
8963
8964 /* All aggregate types that won't fit in a register must be returned
8965 in memory. */
8966 if (type->length () > ARM_INT_REGISTER_SIZE
8967 || !language_pass_by_reference (type).trivially_copyable)
8968 return 1;
8969
8970 /* In the ARM ABI, "integer" like aggregate types are returned in
8971 registers. For an aggregate type to be integer like, its size
8972 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8973 offset of each addressable subfield must be zero. Note that bit
8974 fields are not addressable, and all addressable subfields of
8975 unions always start at offset zero.
8976
8977 This function is based on the behaviour of GCC 2.95.1.
8978 See: gcc/arm.c: arm_return_in_memory() for details.
8979
8980 Note: All versions of GCC before GCC 2.95.2 do not set up the
8981 parameters correctly for a function returning the following
8982 structure: struct { float f;}; This should be returned in memory,
8983 not a register. Richard Earnshaw sent me a patch, but I do not
8984 know of any way to detect if a function like the above has been
8985 compiled with the correct calling convention. */
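/* Illustrative examples of these rules (hypothetical types, not taken
   from any particular program):

     union { int i; char c; }    -- all subfields at offset 0 and no
                                    FP members, so it is "integer
                                    like" and is returned in r0.
     struct { char c; short s; } -- "s" sits at a non-zero offset, so
                                    it is not integer like and goes to
                                    memory.
     struct { float f; }         -- contains an FP field, so it is
                                    returned in memory (see the GCC
                                    2.95 note above).  */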
8986
8987 /* Assume all other aggregate types can be returned in a register.
8988 Run a check for structures, unions and arrays. */
8989 nRc = 0;
8990
8991 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8992 {
8993 int i;
8994 /* Need to check if this struct/union is "integer" like. For
8995 this to be true, its size must be less than or equal to
8996 ARM_INT_REGISTER_SIZE and the offset of each addressable
8997 subfield must be zero. Note that bit fields are not
8998 addressable, and unions always start at offset zero. If any
8999 of the subfields is a floating point type, the struct/union
9000 cannot be an integer type. */
9001
9002 /* For each field in the object, check:
9003 1) Is it FP? --> yes, nRc = 1;
9004 2) Is it addressable (bitpos != 0) and
9005 not packed (bitsize == 0)?
9006 --> yes, nRc = 1
9007 */
9008
9009 for (i = 0; i < type->num_fields (); i++)
9010 {
9011 enum type_code field_type_code;
9012
9013 field_type_code
9014 = check_typedef (type->field (i).type ())->code ();
9015
9016 /* Is it a floating point type field? */
9017 if (field_type_code == TYPE_CODE_FLT)
9018 {
9019 nRc = 1;
9020 break;
9021 }
9022
9023 /* If bitpos != 0, then we have to care about it. */
9024 if (type->field (i).loc_bitpos () != 0)
9025 {
9026 /* Bitfields are not addressable. If the field bitsize is
9027 zero, then the field is not packed. Hence it cannot be
9028 a bitfield or any other packed type. */
9029 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9030 {
9031 nRc = 1;
9032 break;
9033 }
9034 }
9035 }
9036 }
9037
9038 return nRc;
9039 }
9040 }
9041
9042 /* Write into appropriate registers a function return value of type
9043 TYPE, given in virtual format. */
9044
9045 static void
9046 arm_store_return_value (struct type *type, struct regcache *regs,
9047 const gdb_byte *valbuf)
9048 {
9049 struct gdbarch *gdbarch = regs->arch ();
9050 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9051
9052 if (type->code () == TYPE_CODE_FLT)
9053 {
9054 gdb_byte buf[ARM_FP_REGISTER_SIZE];
9055 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9056
9057 switch (tdep->fp_model)
9058 {
9059 case ARM_FLOAT_FPA:
9060
9061 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
9062 regs->cooked_write (ARM_F0_REGNUM, buf);
9063 break;
9064
9065 case ARM_FLOAT_SOFT_FPA:
9066 case ARM_FLOAT_SOFT_VFP:
9067 /* ARM_FLOAT_VFP can arise if this is a variadic function, which
9068 therefore does not use the VFP ABI code path. */
9069 case ARM_FLOAT_VFP:
9070 regs->cooked_write (ARM_A1_REGNUM, valbuf);
9071 if (type->length () > 4)
9072 regs->cooked_write (ARM_A1_REGNUM + 1,
9073 valbuf + ARM_INT_REGISTER_SIZE);
9074 break;
9075
9076 default:
9077 internal_error (_("arm_store_return_value: Floating "
9078 "point model not supported"));
9079 break;
9080 }
9081 }
9082 else if (type->code () == TYPE_CODE_INT
9083 || type->code () == TYPE_CODE_CHAR
9084 || type->code () == TYPE_CODE_BOOL
9085 || type->code () == TYPE_CODE_PTR
9086 || TYPE_IS_REFERENCE (type)
9087 || type->code () == TYPE_CODE_ENUM)
9088 {
9089 if (type->length () <= 4)
9090 {
9091 /* Values of one word or less are zero/sign-extended and
9092 returned in r0. */
9093 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9094 LONGEST val = unpack_long (type, valbuf);
9095
9096 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
9097 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
9098 }
9099 else
9100 {
9101 /* Integral values greater than one word are stored in consecutive
9102 registers starting with r0. This will always be a multiple of
9103 the register size. */
9104 int len = type->length ();
9105 int regno = ARM_A1_REGNUM;
9106
9107 while (len > 0)
9108 {
9109 regs->cooked_write (regno++, valbuf);
9110 len -= ARM_INT_REGISTER_SIZE;
9111 valbuf += ARM_INT_REGISTER_SIZE;
9112 }
9113 }
9114 }
9115 else
9116 {
9117 /* For a structure or union the behaviour is as if the value had
9118 been stored to word-aligned memory and then loaded into
9119 registers with 32-bit load instruction(s). */
9120 int len = type->length ();
9121 int regno = ARM_A1_REGNUM;
9122 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9123
9124 while (len > 0)
9125 {
9126 memcpy (tmpbuf, valbuf,
9127 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9128 regs->cooked_write (regno++, tmpbuf);
9129 len -= ARM_INT_REGISTER_SIZE;
9130 valbuf += ARM_INT_REGISTER_SIZE;
9131 }
9132 }
9133 }
9134
9135
9136 /* Handle function return values. */
9137
9138 static enum return_value_convention
9139 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9140 struct type *valtype, struct regcache *regcache,
9141 gdb_byte *readbuf, const gdb_byte *writebuf)
9142 {
9143 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9144 struct type *func_type = function ? value_type (function) : NULL;
9145 enum arm_vfp_cprc_base_type vfp_base_type;
9146 int vfp_base_count;
9147
9148 if (arm_vfp_abi_for_function (gdbarch, func_type)
9149 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9150 {
9151 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9152 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9153 int i;
9154 for (i = 0; i < vfp_base_count; i++)
9155 {
9156 if (reg_char == 'q')
9157 {
9158 if (writebuf)
9159 arm_neon_quad_write (gdbarch, regcache, i,
9160 writebuf + i * unit_length);
9161
9162 if (readbuf)
9163 arm_neon_quad_read (gdbarch, regcache, i,
9164 readbuf + i * unit_length);
9165 }
9166 else
9167 {
9168 char name_buf[4];
9169 int regnum;
9170
9171 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9172 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9173 strlen (name_buf));
9174 if (writebuf)
9175 regcache->cooked_write (regnum, writebuf + i * unit_length);
9176 if (readbuf)
9177 regcache->cooked_read (regnum, readbuf + i * unit_length);
9178 }
9179 }
9180 return RETURN_VALUE_REGISTER_CONVENTION;
9181 }
9182
9183 if (valtype->code () == TYPE_CODE_STRUCT
9184 || valtype->code () == TYPE_CODE_UNION
9185 || valtype->code () == TYPE_CODE_ARRAY)
9186 {
9187 /* From the AAPCS document:
9188
9189 Result return:
9190
9191 A Composite Type larger than 4 bytes, or whose size cannot be
9192 determined statically by both caller and callee, is stored in memory
9193 at an address passed as an extra argument when the function was
9194 called (Parameter Passing, rule A.4). The memory to be used for the
9195 result may be modified at any point during the function call.
9196
9197 Parameter Passing:
9198
9199 A.4: If the subroutine is a function that returns a result in memory,
9200 then the address for the result is placed in r0 and the NCRN is set
9201 to r1. */
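/* Hypothetical illustration of rule A.4: a composite such as
   struct { int a[4]; } (16 bytes) does not fit the return registers,
   so the caller passes a result address in r0 and the callee stores
   the value there; the readbuf path below recovers it by reading r0
   and then the memory it points to.  */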
9202 if (tdep->struct_return == pcc_struct_return
9203 || arm_return_in_memory (gdbarch, valtype))
9204 {
9205 if (readbuf)
9206 {
9207 CORE_ADDR addr;
9208
9209 regcache->cooked_read (ARM_A1_REGNUM, &addr);
9210 read_memory (addr, readbuf, valtype->length ());
9211 }
9212 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
9213 }
9214 }
9215 else if (valtype->code () == TYPE_CODE_COMPLEX)
9216 {
9217 if (arm_return_in_memory (gdbarch, valtype))
9218 return RETURN_VALUE_STRUCT_CONVENTION;
9219 }
9220
9221 if (writebuf)
9222 arm_store_return_value (valtype, regcache, writebuf);
9223
9224 if (readbuf)
9225 arm_extract_return_value (valtype, regcache, readbuf);
9226
9227 return RETURN_VALUE_REGISTER_CONVENTION;
9228 }
9229
9230
9231 static int
9232 arm_get_longjmp_target (frame_info_ptr frame, CORE_ADDR *pc)
9233 {
9234 struct gdbarch *gdbarch = get_frame_arch (frame);
9235 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9236 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9237 CORE_ADDR jb_addr;
9238 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9239
9240 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9241
9242 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9243 ARM_INT_REGISTER_SIZE))
9244 return 0;
9245
9246 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9247 return 1;
9248 }
9249 /* A call to the CMSE secure entry function "foo", shown at "a", is
9250 rewritten by GNU ld into the form shown at "b".
9251 a) bl xxxx <foo>
9252
9253 <foo>
9254 xxxx:
9255
9256 b) bl yyyy <__acle_se_foo>
9257
9258 section .gnu.sgstubs:
9259 <foo>
9260 yyyy: sg // secure gateway
9261 b.w xxxx <__acle_se_foo> // original_branch_dest
9262
9263 <__acle_se_foo>
9264 xxxx:
9265
9266 When control is at "b", the PC contains "yyyy" (the sg address), which
9267 is a trampoline that does not exist in the source code. This function
9268 returns the target PC "xxxx". For more details please refer to section 5.4
9269 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9270 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9271 document on www.developer.arm.com. */
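/* For example (a sketch, not from any particular binary): for a
   secure entry function named "foo", the code below forms the name
   "__acle_se_foo", looks it up as a minimal symbol, and, if found,
   returns its address, which is the real function past the sg
   trampoline.  */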
9272
9273 static CORE_ADDR
9274 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9275 {
9276 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9277 char *target_name = (char *) alloca (target_len);
9278 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9279
9280 struct bound_minimal_symbol minsym
9281 = lookup_minimal_symbol (target_name, NULL, objfile);
9282
9283 if (minsym.minsym != nullptr)
9284 return minsym.value_address ();
9285
9286 return 0;
9287 }
9288
9289 /* Return true when SEC points to ".gnu.sgstubs" section. */
9290
9291 static bool
9292 arm_is_sgstubs_section (struct obj_section *sec)
9293 {
9294 return (sec != nullptr
9295 && sec->the_bfd_section != nullptr
9296 && sec->the_bfd_section->name != nullptr
9297 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9298 }
9299
9300 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9301 return the target PC. Otherwise return 0. */
9302
9303 CORE_ADDR
9304 arm_skip_stub (frame_info_ptr frame, CORE_ADDR pc)
9305 {
9306 const char *name;
9307 int namelen;
9308 CORE_ADDR start_addr;
9309
9310 /* Find the starting address and name of the function containing the PC. */
9311 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9312 {
9313 /* A 'bx reg' trampoline doesn't belong to any function, so do
9314 the check here. */
9315 start_addr = arm_skip_bx_reg (frame, pc);
9316 if (start_addr != 0)
9317 return start_addr;
9318
9319 return 0;
9320 }
9321
9322 /* If PC is in a Thumb call or return stub, return the address of the
9323 target PC, which is in a register. The thunk functions are called
9324 _call_via_xx, where x is the register name. The possible names
9325 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9326 functions, named __ARM_call_via_r[0-7]. */
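/* For instance, for a hypothetical stub named "_call_via_r3", the
   suffix "r3" matches table[3] below and the target PC is simply the
   value of r3 in this frame.  */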
9327 if (startswith (name, "_call_via_")
9328 || startswith (name, "__ARM_call_via_"))
9329 {
9330 /* Use the name suffix to determine which register contains the
9331 target PC. */
9332 static const char *table[15] =
9333 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9334 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9335 };
9336 int regno;
9337 int offset = strlen (name) - 2;
9338
9339 for (regno = 0; regno <= 14; regno++)
9340 if (strcmp (&name[offset], table[regno]) == 0)
9341 return get_frame_register_unsigned (frame, regno);
9342 }
9343
9344 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9345 non-interworking calls to foo. We could decode the stubs
9346 to find the target but it's easier to use the symbol table. */
9347 namelen = strlen (name);
9348 if (name[0] == '_' && name[1] == '_'
9349 && ((namelen > 2 + strlen ("_from_thumb")
9350 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9351 || (namelen > 2 + strlen ("_from_arm")
9352 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9353 {
9354 char *target_name;
9355 int target_len = namelen - 2;
9356 struct bound_minimal_symbol minsym;
9357 struct objfile *objfile;
9358 struct obj_section *sec;
9359
9360 if (name[namelen - 1] == 'b')
9361 target_len -= strlen ("_from_thumb");
9362 else
9363 target_len -= strlen ("_from_arm");
9364
9365 target_name = (char *) alloca (target_len + 1);
9366 memcpy (target_name, name + 2, target_len);
9367 target_name[target_len] = '\0';
9368
9369 sec = find_pc_section (pc);
9370 objfile = (sec == NULL) ? NULL : sec->objfile;
9371 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9372 if (minsym.minsym != NULL)
9373 return minsym.value_address ();
9374 else
9375 return 0;
9376 }
9377
9378 struct obj_section *section = find_pc_section (pc);
9379
9380 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9381 if (arm_is_sgstubs_section (section))
9382 return arm_skip_cmse_entry (pc, name, section->objfile);
9383
9384 return 0; /* not a stub */
9385 }
9386
9387 static void
9388 arm_update_current_architecture (void)
9389 {
9390 /* If the current architecture is not ARM, we have nothing to do. */
9391 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9392 return;
9393
9394 /* Update the architecture. */
9395 gdbarch_info info;
9396 if (!gdbarch_update_p (info))
9397 internal_error (_("could not update architecture"));
9398 }
9399
9400 static void
9401 set_fp_model_sfunc (const char *args, int from_tty,
9402 struct cmd_list_element *c)
9403 {
9404 int fp_model;
9405
9406 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9407 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9408 {
9409 arm_fp_model = (enum arm_float_model) fp_model;
9410 break;
9411 }
9412
9413 if (fp_model == ARM_FLOAT_LAST)
9414 internal_error (_("Invalid fp model accepted: %s."),
9415 current_fp_model);
9416
9417 arm_update_current_architecture ();
9418 }
9419
9420 static void
9421 show_fp_model (struct ui_file *file, int from_tty,
9422 struct cmd_list_element *c, const char *value)
9423 {
9424 if (arm_fp_model == ARM_FLOAT_AUTO
9425 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9426 {
9427 arm_gdbarch_tdep *tdep
9428 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9429
9430 gdb_printf (file, _("\
9431 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9432 fp_model_strings[tdep->fp_model]);
9433 }
9434 else
9435 gdb_printf (file, _("\
9436 The current ARM floating point model is \"%s\".\n"),
9437 fp_model_strings[arm_fp_model]);
9438 }
9439
9440 static void
9441 arm_set_abi (const char *args, int from_tty,
9442 struct cmd_list_element *c)
9443 {
9444 int arm_abi;
9445
9446 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9447 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9448 {
9449 arm_abi_global = (enum arm_abi_kind) arm_abi;
9450 break;
9451 }
9452
9453 if (arm_abi == ARM_ABI_LAST)
9454 internal_error (_("Invalid ABI accepted: %s."),
9455 arm_abi_string);
9456
9457 arm_update_current_architecture ();
9458 }
9459
9460 static void
9461 arm_show_abi (struct ui_file *file, int from_tty,
9462 struct cmd_list_element *c, const char *value)
9463 {
9464 if (arm_abi_global == ARM_ABI_AUTO
9465 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9466 {
9467 arm_gdbarch_tdep *tdep
9468 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9469
9470 gdb_printf (file, _("\
9471 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9472 arm_abi_strings[tdep->arm_abi]);
9473 }
9474 else
9475 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9476 arm_abi_string);
9477 }
9478
9479 static void
9480 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9481 struct cmd_list_element *c, const char *value)
9482 {
9483 gdb_printf (file,
9484 _("The current execution mode assumed "
9485 "(when symbols are unavailable) is \"%s\".\n"),
9486 arm_fallback_mode_string);
9487 }
9488
9489 static void
9490 arm_show_force_mode (struct ui_file *file, int from_tty,
9491 struct cmd_list_element *c, const char *value)
9492 {
9493 gdb_printf (file,
9494 _("The current execution mode assumed "
9495 "(even when symbols are available) is \"%s\".\n"),
9496 arm_force_mode_string);
9497 }
9498
9499 static void
9500 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9501 struct cmd_list_element *c, const char *value)
9502 {
9503 gdb_printf (file,
9504 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9505 arm_unwind_secure_frames ? "on" : "off");
9506 }
9507
9508 /* If the user changes the register disassembly style used for "info
9509 registers" and other commands, we also have to switch the style used
9510 in opcodes for disassembly output. This function is run by the "set
9511 arm disassembly" command and does that switch. */
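/* For example, "set arm disassembly std" makes the function below
   turn the style "std" into the disassembler option "reg-names-std"
   (a sketch of the mapping; the set of valid styles comes from the
   opcodes library).  */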
9512
9513 static void
9514 set_disassembly_style_sfunc (const char *args, int from_tty,
9515 struct cmd_list_element *c)
9516 {
9517 /* Convert the short style name into the long style name (e.g., reg-names-*)
9518 before calling the generic set_disassembler_options() function. */
9519 std::string long_name = std::string ("reg-names-") + disassembly_style;
9520 set_disassembler_options (&long_name[0]);
9521 }
9522
9523 static void
9524 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9525 struct cmd_list_element *c, const char *value)
9526 {
9527 struct gdbarch *gdbarch = get_current_arch ();
9528 char *options = get_disassembler_options (gdbarch);
9529 const char *style = "";
9530 int len = 0;
9531 const char *opt;
9532
9533 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9534 if (startswith (opt, "reg-names-"))
9535 {
9536 style = &opt[strlen ("reg-names-")];
9537 len = strcspn (style, ",");
9538 }
9539
9540 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9541 }
9542 \f
9543 /* Return the ARM register name corresponding to register I. */
9544 static const char *
9545 arm_register_name (struct gdbarch *gdbarch, int i)
9546 {
9547 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9548
9549 if (is_s_pseudo (gdbarch, i))
9550 {
9551 static const char *const s_pseudo_names[] = {
9552 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9553 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9554 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9555 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9556 };
9557
9558 return s_pseudo_names[i - tdep->s_pseudo_base];
9559 }
9560
9561 if (is_q_pseudo (gdbarch, i))
9562 {
9563 static const char *const q_pseudo_names[] = {
9564 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9565 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9566 };
9567
9568 return q_pseudo_names[i - tdep->q_pseudo_base];
9569 }
9570
9571 if (is_mve_pseudo (gdbarch, i))
9572 return "p0";
9573
9574 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9575 if (is_pacbti_pseudo (gdbarch, i))
9576 return "";
9577
9578 if (i >= ARRAY_SIZE (arm_register_names))
9579 /* These registers are only supported on targets which supply
9580 an XML description. */
9581 return "";
9582
9583 /* Non-pseudo registers. */
9584 return arm_register_names[i];
9585 }
9586
9587 /* Test whether the coff symbol specific value corresponds to a Thumb
9588 function. */
9589
9590 static int
9591 coff_sym_is_thumb (int val)
9592 {
9593 return (val == C_THUMBEXT
9594 || val == C_THUMBSTAT
9595 || val == C_THUMBEXTFUNC
9596 || val == C_THUMBSTATFUNC
9597 || val == C_THUMBLABEL);
9598 }
9599
9600 /* arm_coff_make_msymbol_special()
9601 arm_elf_make_msymbol_special()
9602
9603 These functions test whether the COFF or ELF symbol corresponds to
9604 an address in thumb code, and set a "special" bit in a minimal
9605 symbol to indicate that it does. */
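/* For example, a hypothetical function compiled as Thumb code is
   tagged ST_BRANCH_TO_THUMB in its ELF symbol's st_target_internal
   field (or carries one of the C_THUMB* classes in COFF); the helpers
   below mark its minimal symbol so that later checks such as
   arm_pc_is_thumb can tell it apart from ARM code.  */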
9606
9607 static void
9608 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9609 {
9610 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9611
9612 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9613 == ST_BRANCH_TO_THUMB)
9614 MSYMBOL_SET_SPECIAL (msym);
9615 }
9616
9617 static void
9618 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9619 {
9620 if (coff_sym_is_thumb (val))
9621 MSYMBOL_SET_SPECIAL (msym);
9622 }
9623
9624 static void
9625 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9626 asymbol *sym)
9627 {
9628 const char *name = bfd_asymbol_name (sym);
9629 struct arm_per_bfd *data;
9630 struct arm_mapping_symbol new_map_sym;
9631
9632 gdb_assert (name[0] == '$');
9633 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9634 return;
9635
9636 data = arm_bfd_data_key.get (objfile->obfd.get ());
9637 if (data == NULL)
9638 data = arm_bfd_data_key.emplace (objfile->obfd.get (),
9639 objfile->obfd->section_count);
9640 arm_mapping_symbol_vec &map
9641 = data->section_maps[bfd_asymbol_section (sym)->index];
9642
9643 new_map_sym.value = sym->value;
9644 new_map_sym.type = name[1];
9645
9646 /* Insert at the end; the vector will be sorted on first use. */
9647 map.push_back (new_map_sym);
9648 }
9649
9650 static void
9651 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9652 {
9653 struct gdbarch *gdbarch = regcache->arch ();
9654 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9655
9656 /* If necessary, set the T bit. */
9657 if (arm_apcs_32)
9658 {
9659 ULONGEST val, t_bit;
9660 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9661 t_bit = arm_psr_thumb_bit (gdbarch);
9662 if (arm_pc_is_thumb (gdbarch, pc))
9663 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9664 val | t_bit);
9665 else
9666 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9667 val & ~t_bit);
9668 }
9669 }
9670
9671 /* Read the contents of a NEON quad register, by reading from two
9672 double registers. This is used to implement the quad pseudo
9673 registers, and for argument passing in case the quad registers are
9674 missing; vectors are passed in quad registers when using the VFP
9675 ABI, even if a NEON unit is not present. REGNUM is the index of
9676 the quad register, in [0, 15]. */
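/* As a sketch of the layout handled here: q0 overlays d0 and d1; on a
   little-endian target the low 8 bytes of the buffer come from d0 and
   the high 8 bytes from d1, while on a big-endian target the two
   halves are placed the other way round, which is what the OFFSET
   computation below implements.  */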
9677
9678 static enum register_status
9679 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9680 int regnum, gdb_byte *buf)
9681 {
9682 char name_buf[4];
9683 gdb_byte reg_buf[8];
9684 int offset, double_regnum;
9685 enum register_status status;
9686
9687 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9688 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9689 strlen (name_buf));
9690
9691 /* d0 is always the least significant half of q0. */
9692 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9693 offset = 8;
9694 else
9695 offset = 0;
9696
9697 status = regcache->raw_read (double_regnum, reg_buf);
9698 if (status != REG_VALID)
9699 return status;
9700 memcpy (buf + offset, reg_buf, 8);
9701
9702 offset = 8 - offset;
9703 status = regcache->raw_read (double_regnum + 1, reg_buf);
9704 if (status != REG_VALID)
9705 return status;
9706 memcpy (buf + offset, reg_buf, 8);
9707
9708 return REG_VALID;
9709 }
9710
9711 /* Read the contents of the MVE pseudo register REGNUM and store it
9712 in BUF. */
9713
9714 static enum register_status
9715 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9716 int regnum, gdb_byte *buf)
9717 {
9718 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9719
9720 /* P0 is the first 16 bits of VPR. */
9721 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9722 }
9723
9724 static enum register_status
9725 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9726 int regnum, gdb_byte *buf)
9727 {
9728 const int num_regs = gdbarch_num_regs (gdbarch);
9729 char name_buf[4];
9730 gdb_byte reg_buf[8];
9731 int offset, double_regnum;
9732 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9733
9734 gdb_assert (regnum >= num_regs);
9735
9736 if (is_q_pseudo (gdbarch, regnum))
9737 {
9738 /* Quad-precision register. */
9739 return arm_neon_quad_read (gdbarch, regcache,
9740 regnum - tdep->q_pseudo_base, buf);
9741 }
9742 else if (is_mve_pseudo (gdbarch, regnum))
9743 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9744 else
9745 {
9746 enum register_status status;
9747
9748 regnum -= tdep->s_pseudo_base;
9749 /* Single-precision register. */
9750 gdb_assert (regnum < 32);
9751
9752 /* s0 is always the least significant half of d0. */
9753 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9754 offset = (regnum & 1) ? 0 : 4;
9755 else
9756 offset = (regnum & 1) ? 4 : 0;
9757
9758 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9759 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9760 strlen (name_buf));
9761
9762 status = regcache->raw_read (double_regnum, reg_buf);
9763 if (status == REG_VALID)
9764 memcpy (buf, reg_buf + offset, 4);
9765 return status;
9766 }
9767 }
9768
9769 /* Store the contents of BUF to a NEON quad register, by writing to
9770 two double registers. This is used to implement the quad pseudo
9771 registers, and for argument passing in case the quad registers are
9772 missing; vectors are passed in quad registers when using the VFP
9773 ABI, even if a NEON unit is not present. REGNUM is the index
9774 of the quad register, in [0, 15]. */
9775
9776 static void
9777 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9778 int regnum, const gdb_byte *buf)
9779 {
9780 char name_buf[4];
9781 int offset, double_regnum;
9782
9783 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9784 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9785 strlen (name_buf));
9786
9787 /* d0 is always the least significant half of q0. */
9788 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9789 offset = 8;
9790 else
9791 offset = 0;
9792
9793 regcache->raw_write (double_regnum, buf + offset);
9794 offset = 8 - offset;
9795 regcache->raw_write (double_regnum + 1, buf + offset);
9796 }
9797
9798 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9799
9800 static void
9801 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9802 int regnum, const gdb_byte *buf)
9803 {
9804 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9805
9806 /* P0 is the first 16 bits of VPR. */
9807 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9808 }
9809
9810 static void
9811 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9812 int regnum, const gdb_byte *buf)
9813 {
9814 const int num_regs = gdbarch_num_regs (gdbarch);
9815 char name_buf[4];
9816 gdb_byte reg_buf[8];
9817 int offset, double_regnum;
9818 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9819
9820 gdb_assert (regnum >= num_regs);
9821
9822 if (is_q_pseudo (gdbarch, regnum))
9823 {
9824 /* Quad-precision register. */
9825 arm_neon_quad_write (gdbarch, regcache,
9826 regnum - tdep->q_pseudo_base, buf);
9827 }
9828 else if (is_mve_pseudo (gdbarch, regnum))
9829 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9830 else
9831 {
9832 regnum -= tdep->s_pseudo_base;
9833 /* Single-precision register. */
9834 gdb_assert (regnum < 32);
9835
9836 /* s0 is always the least significant half of d0. */
9837 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9838 offset = (regnum & 1) ? 0 : 4;
9839 else
9840 offset = (regnum & 1) ? 4 : 0;
9841
9842 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9843 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9844 strlen (name_buf));
9845
9846 regcache->raw_read (double_regnum, reg_buf);
9847 memcpy (reg_buf + offset, buf, 4);
9848 regcache->raw_write (double_regnum, reg_buf);
9849 }
9850 }
9851
9852 static struct value *
9853 value_of_arm_user_reg (frame_info_ptr frame, const void *baton)
9854 {
9855 const int *reg_p = (const int *) baton;
9856 return value_of_register (*reg_p, frame);
9857 }
9858 \f
9859 static enum gdb_osabi
9860 arm_elf_osabi_sniffer (bfd *abfd)
9861 {
9862 unsigned int elfosabi;
9863 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9864
9865 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9866
9867 if (elfosabi == ELFOSABI_ARM)
9868 /* GNU tools use this value. Check note sections in this case,
9869 as well. */
9870 {
9871 for (asection *sect : gdb_bfd_sections (abfd))
9872 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9873 }
9874
9875 /* Anything else will be handled by the generic ELF sniffer. */
9876 return osabi;
9877 }
9878
9879 static int
9880 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9881 const struct reggroup *group)
9882 {
9883 /* The FPS register's type is INT, but it belongs to float_reggroup.
9884 Besides this, the FPS register belongs to save_reggroup,
9885 restore_reggroup, and all_reggroup, of course. */
9886 if (regnum == ARM_FPS_REGNUM)
9887 return (group == float_reggroup
9888 || group == save_reggroup
9889 || group == restore_reggroup
9890 || group == all_reggroup);
9891 else
9892 return default_register_reggroup_p (gdbarch, regnum, group);
9893 }
9894
9895 /* For backward-compatibility we allow two 'g' packet lengths with
9896 the remote protocol depending on whether FPA registers are
9897 supplied. M-profile targets do not have FPA registers, but some
9898 stubs already exist in the wild which use a 'g' packet which
9899 supplies them albeit with dummy values. The packet format which
9900 includes FPA registers should be considered deprecated for
9901 M-profile targets. */
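/* For illustration (a sketch based on the constants used below, not a
   protocol guarantee): a plain M-profile 'g' packet is
   ARM_CORE_REGS_SIZE bytes, one that also carries dummy FPA registers
   is ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE bytes, and so on for the
   VFP, MVE and stack-pointer variants registered below.  */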
9902
9903 static void
9904 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9905 {
9906 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9907
9908 if (tdep->is_m)
9909 {
9910 const target_desc *tdesc;
9911
9912 /* If we know from the executable this is an M-profile target,
9913 cater for remote targets whose register set layout is the
9914 same as the FPA layout. */
9915 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9916 register_remote_g_packet_guess (gdbarch,
9917 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9918 tdesc);
9919
9920 /* The regular M-profile layout. */
9921 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9922 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9923 tdesc);
9924
9925 /* M-profile plus M4F VFP. */
9926 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9927 register_remote_g_packet_guess (gdbarch,
9928 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9929 tdesc);
9930 /* M-profile plus MVE. */
9931 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9932 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9933 + ARM_VFP2_REGS_SIZE
9934 + ARM_INT_REGISTER_SIZE, tdesc);
9935
9936 /* M-profile system (stack pointers). */
9937 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
9938 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
9939 }
9940
9941 /* Otherwise we don't have a useful guess. */
9942 }
9943
9944 /* Implement the code_of_frame_writable gdbarch method. */
9945
9946 static int
9947 arm_code_of_frame_writable (struct gdbarch *gdbarch, frame_info_ptr frame)
9948 {
9949 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9950
9951 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9952 {
9953 /* M-profile exception frames return to some magic PCs, which
9954 aren't writable at all. */
9955 return 0;
9956 }
9957 else
9958 return 1;
9959 }
9960
9961 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
9962 to be postfixed by a version (e.g. armv7hl). */
9963
9964 static const char *
9965 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9966 {
9967 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9968 return "arm(v[^- ]*)?";
9969 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9970 }
9971
9972 /* Implement the "get_pc_address_flags" gdbarch method. */
9973
9974 static std::string
9975 arm_get_pc_address_flags (frame_info_ptr frame, CORE_ADDR pc)
9976 {
9977 if (get_frame_pc_masked (frame))
9978 return "PAC";
9979
9980 return "";
9981 }
9982
9983 /* Initialize the current architecture based on INFO. If possible,
9984 re-use an architecture from ARCHES, which is a list of
9985 architectures already created during this debugging session.
9986
9987 Called e.g. at program startup, when reading a core file, and when
9988 reading a binary file. */
9989
9990 static struct gdbarch *
9991 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9992 {
9993 struct gdbarch *gdbarch;
9994 struct gdbarch_list *best_arch;
9995 enum arm_abi_kind arm_abi = arm_abi_global;
9996 enum arm_float_model fp_model = arm_fp_model;
9997 tdesc_arch_data_up tdesc_data;
9998 int i;
9999 bool is_m = false;
10000 bool have_sec_ext = false;
10001 int vfp_register_count = 0;
10002 bool have_s_pseudos = false, have_q_pseudos = false;
10003 bool have_wmmx_registers = false;
10004 bool have_neon = false;
10005 bool have_fpa_registers = true;
10006 const struct target_desc *tdesc = info.target_desc;
10007 bool have_vfp = false;
10008 bool have_mve = false;
10009 bool have_pacbti = false;
10010 int mve_vpr_regnum = -1;
10011 int register_count = ARM_NUM_REGS;
10012 bool have_m_profile_msp = false;
10013 int m_profile_msp_regnum = -1;
10014 int m_profile_psp_regnum = -1;
10015 int m_profile_msp_ns_regnum = -1;
10016 int m_profile_psp_ns_regnum = -1;
10017 int m_profile_msp_s_regnum = -1;
10018 int m_profile_psp_s_regnum = -1;
10019 int tls_regnum = 0;
10020
10021 /* If we have an object to base this architecture on, try to determine
10022 its ABI. */
10023
10024 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
10025 {
10026 int ei_osabi, e_flags;
10027
10028 switch (bfd_get_flavour (info.abfd))
10029 {
10030 case bfd_target_coff_flavour:
10031 /* Assume it's an old APCS-style ABI. */
10032 /* XXX WinCE? */
10033 arm_abi = ARM_ABI_APCS;
10034 break;
10035
10036 case bfd_target_elf_flavour:
10037 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
10038 e_flags = elf_elfheader (info.abfd)->e_flags;
10039
10040 if (ei_osabi == ELFOSABI_ARM)
10041 {
10042 /* GNU tools used to use this value, but do not for EABI
10043 objects. There's nowhere to tag an EABI version
10044 anyway, so assume APCS. */
10045 arm_abi = ARM_ABI_APCS;
10046 }
10047 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10048 {
10049 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10050
10051 switch (eabi_ver)
10052 {
10053 case EF_ARM_EABI_UNKNOWN:
10054 /* Assume GNU tools. */
10055 arm_abi = ARM_ABI_APCS;
10056 break;
10057
10058 case EF_ARM_EABI_VER4:
10059 case EF_ARM_EABI_VER5:
10060 arm_abi = ARM_ABI_AAPCS;
10061 /* EABI binaries default to VFP float ordering.
10062 They may also contain build attributes that can
10063 be used to identify if the VFP argument-passing
10064 ABI is in use. */
10065 if (fp_model == ARM_FLOAT_AUTO)
10066 {
10067 #ifdef HAVE_ELF
10068 switch (bfd_elf_get_obj_attr_int (info.abfd,
10069 OBJ_ATTR_PROC,
10070 Tag_ABI_VFP_args))
10071 {
10072 case AEABI_VFP_args_base:
10073 /* "The user intended FP parameter/result
10074 passing to conform to AAPCS, base
10075 variant". */
10076 fp_model = ARM_FLOAT_SOFT_VFP;
10077 break;
10078 case AEABI_VFP_args_vfp:
10079 /* "The user intended FP parameter/result
10080 passing to conform to AAPCS, VFP
10081 variant". */
10082 fp_model = ARM_FLOAT_VFP;
10083 break;
10084 case AEABI_VFP_args_toolchain:
10085 /* "The user intended FP parameter/result
10086 passing to conform to tool chain-specific
10087 conventions" - we don't know any such
10088 conventions, so leave it as "auto". */
10089 break;
10090 case AEABI_VFP_args_compatible:
10091 /* "Code is compatible with both the base
10092 and VFP variants; the user did not permit
10093 non-variadic functions to pass FP
10094 parameters/results" - leave it as
10095 "auto". */
10096 break;
10097 default:
10098 /* Attribute value not mentioned in the
10099 November 2012 ABI, so leave it as
10100 "auto". */
10101 break;
10102 }
10103 #else
10104 fp_model = ARM_FLOAT_SOFT_VFP;
10105 #endif
10106 }
10107 break;
10108
10109 default:
10110 /* Leave it as "auto". */
10111 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10112 break;
10113 }
10114
10115 #ifdef HAVE_ELF
10116 /* Detect M-profile programs. This only works if the
10117 executable file includes build attributes; GCC does
10118 copy them to the executable, but e.g. RealView does
10119 not. */
10120 int attr_arch
10121 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10122 Tag_CPU_arch);
10123 int attr_profile
10124 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10125 Tag_CPU_arch_profile);
10126
10127 /* GCC specifies the profile for v6-M; RealView only
10128 specifies the profile for architectures starting with
10129 V7 (as opposed to architectures with a tag
10130 numerically greater than TAG_CPU_ARCH_V7). */
10131 if (!tdesc_has_registers (tdesc)
10132 && (attr_arch == TAG_CPU_ARCH_V6_M
10133 || attr_arch == TAG_CPU_ARCH_V6S_M
10134 || attr_arch == TAG_CPU_ARCH_V7E_M
10135 || attr_arch == TAG_CPU_ARCH_V8M_BASE
10136 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
10137 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
10138 || attr_profile == 'M'))
10139 is_m = true;
10140
10141 /* Look for attributes that indicate support for ARMv8.1-m
10142 PACBTI. */
10143 if (!tdesc_has_registers (tdesc) && is_m)
10144 {
10145 int attr_pac_extension
10146 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10147 Tag_PAC_extension);
10148
10149 int attr_bti_extension
10150 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10151 Tag_BTI_extension);
10152
10153 int attr_pacret_use
10154 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10155 Tag_PACRET_use);
10156
10157 int attr_bti_use
10158 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10159 Tag_BTI_use);
10160
10161 if (attr_pac_extension != 0 || attr_bti_extension != 0
10162 || attr_pacret_use != 0 || attr_bti_use != 0)
10163 have_pacbti = true;
10164 }
10165 #endif
10166 }
10167
10168 if (fp_model == ARM_FLOAT_AUTO)
10169 {
10170 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10171 {
10172 case 0:
10173 /* Leave it as "auto". Strictly speaking this case
10174 means FPA, but almost nobody uses that now, and
10175 many toolchains fail to set the appropriate bits
10176 for the floating-point model they use. */
10177 break;
10178 case EF_ARM_SOFT_FLOAT:
10179 fp_model = ARM_FLOAT_SOFT_FPA;
10180 break;
10181 case EF_ARM_VFP_FLOAT:
10182 fp_model = ARM_FLOAT_VFP;
10183 break;
10184 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10185 fp_model = ARM_FLOAT_SOFT_VFP;
10186 break;
10187 }
10188 }
10189
10190 if (e_flags & EF_ARM_BE8)
10191 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10192
10193 break;
10194
10195 default:
10196 /* Leave it as "auto". */
10197 break;
10198 }
10199 }
10200
10201 /* Check any target description for validity. */
10202 if (tdesc_has_registers (tdesc))
10203 {
10204 /* For most registers we require GDB's default names; but also allow
10205 the numeric names for sp / lr / pc, as a convenience. */
10206 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10207 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10208 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10209
10210 const struct tdesc_feature *feature;
10211 int valid_p;
10212
10213 feature = tdesc_find_feature (tdesc,
10214 "org.gnu.gdb.arm.core");
10215 if (feature == NULL)
10216 {
10217 feature = tdesc_find_feature (tdesc,
10218 "org.gnu.gdb.arm.m-profile");
10219 if (feature == NULL)
10220 return NULL;
10221 else
10222 is_m = true;
10223 }
10224
10225 tdesc_data = tdesc_data_alloc ();
10226
10227 valid_p = 1;
10228 for (i = 0; i < ARM_SP_REGNUM; i++)
10229 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10230 arm_register_names[i]);
10231 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10232 ARM_SP_REGNUM,
10233 arm_sp_names);
10234 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10235 ARM_LR_REGNUM,
10236 arm_lr_names);
10237 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10238 ARM_PC_REGNUM,
10239 arm_pc_names);
10240 if (is_m)
10241 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10242 ARM_PS_REGNUM, "xpsr");
10243 else
10244 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10245 ARM_PS_REGNUM, "cpsr");
10246
10247 if (!valid_p)
10248 return NULL;
10249
10250 if (is_m)
10251 {
10252 feature = tdesc_find_feature (tdesc,
10253 "org.gnu.gdb.arm.m-system");
10254 if (feature != nullptr)
10255 {
10256 /* MSP */
10257 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10258 register_count, "msp");
10259 if (!valid_p)
10260 {
10261 warning (_("M-profile m-system feature is missing required register msp."));
10262 return nullptr;
10263 }
10264 have_m_profile_msp = true;
10265 m_profile_msp_regnum = register_count++;
10266
10267 /* PSP */
10268 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10269 register_count, "psp");
10270 if (!valid_p)
10271 {
10272 warning (_("M-profile m-system feature is missing required register psp."));
10273 return nullptr;
10274 }
10275 m_profile_psp_regnum = register_count++;
10276 }
10277 }
10278
10279 feature = tdesc_find_feature (tdesc,
10280 "org.gnu.gdb.arm.fpa");
10281 if (feature != NULL)
10282 {
10283 valid_p = 1;
10284 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10285 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10286 arm_register_names[i]);
10287 if (!valid_p)
10288 return NULL;
10289 }
10290 else
10291 have_fpa_registers = false;
10292
10293 feature = tdesc_find_feature (tdesc,
10294 "org.gnu.gdb.xscale.iwmmxt");
10295 if (feature != NULL)
10296 {
10297 static const char *const iwmmxt_names[] = {
10298 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10299 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10300 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10301 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10302 };
10303
10304 valid_p = 1;
10305 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10306 valid_p
10307 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10308 iwmmxt_names[i - ARM_WR0_REGNUM]);
10309
10310 /* Check for the control registers, but do not fail if they
10311 are missing. */
10312 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10313 tdesc_numbered_register (feature, tdesc_data.get (), i,
10314 iwmmxt_names[i - ARM_WR0_REGNUM]);
10315
10316 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10317 valid_p
10318 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10319 iwmmxt_names[i - ARM_WR0_REGNUM]);
10320
10321 if (!valid_p)
10322 return NULL;
10323
10324 have_wmmx_registers = true;
10325 }
10326
10327 /* If we have a VFP unit, check whether the single precision registers
10328 are present. If not, then we will synthesize them as pseudo
10329 registers. */
10330 feature = tdesc_find_feature (tdesc,
10331 "org.gnu.gdb.arm.vfp");
10332 if (feature != NULL)
10333 {
10334 static const char *const vfp_double_names[] = {
10335 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10336 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10337 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10338 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10339 };
10340
10341 /* Require the double precision registers. There must be either
10342 16 or 32. */
10343 valid_p = 1;
10344 for (i = 0; i < 32; i++)
10345 {
10346 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10347 ARM_D0_REGNUM + i,
10348 vfp_double_names[i]);
10349 if (!valid_p)
10350 break;
10351 }
10352 if (!valid_p && i == 16)
10353 valid_p = 1;
10354
10355 /* Also require FPSCR. */
10356 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10357 ARM_FPSCR_REGNUM, "fpscr");
10358 if (!valid_p)
10359 return NULL;
10360
10361 have_vfp = true;
10362
10363 if (tdesc_unnumbered_register (feature, "s0") == 0)
10364 have_s_pseudos = true;
10365
10366 vfp_register_count = i;
10367
10368 /* If we have VFP, also check for NEON. The architecture allows
10369 NEON without VFP (integer vector operations only), but GDB
10370 does not support that. */
10371 feature = tdesc_find_feature (tdesc,
10372 "org.gnu.gdb.arm.neon");
10373 if (feature != NULL)
10374 {
10375 /* NEON requires 32 double-precision registers. */
10376 if (i != 32)
10377 return NULL;
10378
10379 /* If there are quad registers defined by the stub, use
10380 their type; otherwise (normally) provide them with
10381 the default type. */
10382 if (tdesc_unnumbered_register (feature, "q0") == 0)
10383 have_q_pseudos = true;
10384 }
10385 }
10386
10387 /* Check for the TLS register feature. */
10388 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10389 if (feature != nullptr)
10390 {
10391 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10392 register_count, "tpidruro");
10393 if (!valid_p)
10394 return nullptr;
10395
10396 tls_regnum = register_count;
10397 register_count++;
10398 }
10399
10400 /* Check for MVE after all the checks for GPRs, VFP and Neon.
10401 MVE (Helium) is an M-profile extension. */
10402 if (is_m)
10403 {
10404 /* Do we have the MVE feature? */
10405 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
10406
10407 if (feature != nullptr)
10408 {
10409 /* If we have MVE, we must always have the VPR register. */
10410 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10411 register_count, "vpr");
10412 if (!valid_p)
10413 {
10414 warning (_("MVE feature is missing required register vpr."));
10415 return nullptr;
10416 }
10417
10418 have_mve = true;
10419 mve_vpr_regnum = register_count;
10420 register_count++;
10421
10422 /* We can't have Q pseudo registers available here, as that
10423 would mean we have NEON features, and that is only available
10424 on A and R profiles. */
10425 gdb_assert (!have_q_pseudos);
10426
10427 /* Given we have an M-profile target description, if MVE is
10428 enabled and there are VFP registers, we should have Q
10429 pseudo registers (Q0 ~ Q7). */
10430 if (have_vfp)
10431 have_q_pseudos = true;
10432 }
10433
10434 /* Do we have the ARMv8.1-m PACBTI feature? */
10435 feature = tdesc_find_feature (tdesc,
10436 "org.gnu.gdb.arm.m-profile-pacbti");
10437 if (feature != nullptr)
10438 {
10439 /* By advertising this feature, the target acknowledges the
10440 presence of the ARMv8.1-m PACBTI extensions.
10441
10442 We don't care for any particular registers in this group, so
10443 the target is free to include whatever it deems appropriate.
10444
10445 The expectation is for this feature to include the PAC
10446 keys. */
10447 have_pacbti = true;
10448 }
10449
10450 /* Do we have the Security extension? */
10451 feature = tdesc_find_feature (tdesc,
10452 "org.gnu.gdb.arm.secext");
10453 if (feature != nullptr)
10454 {
10455 /* Secure/Non-secure stack pointers. */
10456 /* MSP_NS */
10457 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10458 register_count, "msp_ns");
10459 if (!valid_p)
10460 {
10461 warning (_("M-profile secext feature is missing required register msp_ns."));
10462 return nullptr;
10463 }
10464 m_profile_msp_ns_regnum = register_count++;
10465
10466 /* PSP_NS */
10467 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10468 register_count, "psp_ns");
10469 if (!valid_p)
10470 {
10471 warning (_("M-profile secext feature is missing required register psp_ns."));
10472 return nullptr;
10473 }
10474 m_profile_psp_ns_regnum = register_count++;
10475
10476 /* MSP_S */
10477 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10478 register_count, "msp_s");
10479 if (!valid_p)
10480 {
10481 warning (_("M-profile secext feature is missing required register msp_s."));
10482 return nullptr;
10483 }
10484 m_profile_msp_s_regnum = register_count++;
10485
10486 /* PSP_S */
10487 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10488 register_count, "psp_s");
10489 if (!valid_p)
10490 {
10491 warning (_("M-profile secext feature is missing required register psp_s."));
10492 return nullptr;
10493 }
10494 m_profile_psp_s_regnum = register_count++;
10495
10496 have_sec_ext = true;
10497 }
10498
10499 }
10500 }
10501
10502 /* If there is already a candidate, use it. */
10503 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10504 best_arch != NULL;
10505 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10506 {
10507 arm_gdbarch_tdep *tdep
10508 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch);
10509
10510 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10511 continue;
10512
10513 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10514 continue;
10515
10516 /* There are various other properties in tdep that we do not
10517 need to check here: those derived from a target description,
10518 since gdbarches with a different target description are
10519 automatically disqualified. */
10520
10521 /* Do check is_m, though, since it might come from the binary. */
10522 if (is_m != tdep->is_m)
10523 continue;
10524
10525 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10526 the binary. */
10527 if (have_pacbti != tdep->have_pacbti)
10528 continue;
10529
10530 /* Found a match. */
10531 break;
10532 }
10533
10534 if (best_arch != NULL)
10535 return best_arch->gdbarch;
10536
10537 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
10538 gdbarch = gdbarch_alloc (&info, tdep);
10539
10540 /* Record additional information about the architecture we are defining.
10541 These are gdbarch discriminators, like the OSABI. */
10542 tdep->arm_abi = arm_abi;
10543 tdep->fp_model = fp_model;
10544 tdep->is_m = is_m;
10545 tdep->have_sec_ext = have_sec_ext;
10546 tdep->have_fpa_registers = have_fpa_registers;
10547 tdep->have_wmmx_registers = have_wmmx_registers;
10548 gdb_assert (vfp_register_count == 0
10549 || vfp_register_count == 16
10550 || vfp_register_count == 32);
10551 tdep->vfp_register_count = vfp_register_count;
10552 tdep->have_s_pseudos = have_s_pseudos;
10553 tdep->have_q_pseudos = have_q_pseudos;
10554 tdep->have_neon = have_neon;
10555 tdep->tls_regnum = tls_regnum;
10556
10557 /* Adjust the MVE feature settings. */
10558 if (have_mve)
10559 {
10560 tdep->have_mve = true;
10561 tdep->mve_vpr_regnum = mve_vpr_regnum;
10562 }
10563
10564 /* Adjust the PACBTI feature settings. */
10565 tdep->have_pacbti = have_pacbti;
10566
10567 /* Adjust the M-profile stack pointers settings. */
10568 if (have_m_profile_msp)
10569 {
10570 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10571 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10572 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10573 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10574 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10575 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10576 }
10577
10578 arm_register_g_packet_guesses (gdbarch);
10579
10580 /* Breakpoints. */
10581 switch (info.byte_order_for_code)
10582 {
10583 case BFD_ENDIAN_BIG:
10584 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10585 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10586 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10587 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10588
10589 break;
10590
10591 case BFD_ENDIAN_LITTLE:
10592 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10593 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10594 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10595 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10596
10597 break;
10598
10599 default:
10600 internal_error (_("arm_gdbarch_init: bad byte order for float format"));
10601 }
10602
10603 /* On ARM targets char defaults to unsigned. */
10604 set_gdbarch_char_signed (gdbarch, 0);
10605
10606 /* wchar_t is unsigned under the AAPCS. */
10607 if (tdep->arm_abi == ARM_ABI_AAPCS)
10608 set_gdbarch_wchar_signed (gdbarch, 0);
10609 else
10610 set_gdbarch_wchar_signed (gdbarch, 1);
10611
10612 /* Compute type alignment. */
10613 set_gdbarch_type_align (gdbarch, arm_type_align);
10614
10615 /* Note: for displaced stepping, this includes the breakpoint, and one word
10616 of additional scratch space. This setting isn't used for anything besides
10617 displaced stepping at present. */
10618 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10619
10620 /* This should be low enough for everything. */
10621 tdep->lowest_pc = 0x20;
10622 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10623
10624 /* The default, for both APCS and AAPCS, is to return small
10625 structures in registers. */
10626 tdep->struct_return = reg_struct_return;
10627
10628 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10629 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10630
10631 if (is_m)
10632 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10633
10634 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10635
10636 frame_base_set_default (gdbarch, &arm_normal_base);
10637
10638 /* Address manipulation. */
10639 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10640
10641 /* Advance PC across function entry code. */
10642 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10643
10644 /* Detect whether PC is at a point where the stack has been destroyed. */
10645 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10646
10647 /* Skip trampolines. */
10648 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10649
10650 /* The stack grows downward. */
10651 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10652
10653 /* Breakpoint manipulation. */
10654 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10655 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10656 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10657 arm_breakpoint_kind_from_current_state);
10658
10659 /* Information about registers, etc. */
10660 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10661 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10662 set_gdbarch_num_regs (gdbarch, register_count);
10663 set_gdbarch_register_type (gdbarch, arm_register_type);
10664 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10665
10666 /* This "info float" is FPA-specific. Use the generic version if we
10667 do not have FPA. */
10668 if (tdep->have_fpa_registers)
10669 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10670
10671 /* Internal <-> external register number maps. */
10672 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10673 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10674
10675 set_gdbarch_register_name (gdbarch, arm_register_name);
10676
10677 /* Returning results. */
10678 set_gdbarch_return_value (gdbarch, arm_return_value);
10679
10680 /* Disassembly. */
10681 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10682
10683 /* Minsymbol frobbing. */
10684 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10685 set_gdbarch_coff_make_msymbol_special (gdbarch,
10686 arm_coff_make_msymbol_special);
10687 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10688
10689 /* Thumb-2 IT block support. */
10690 set_gdbarch_adjust_breakpoint_address (gdbarch,
10691 arm_adjust_breakpoint_address);
10692
10693 /* Virtual tables. */
10694 set_gdbarch_vbit_in_delta (gdbarch, 1);
10695
10696 /* Hook in the ABI-specific overrides, if they have been registered. */
10697 gdbarch_init_osabi (info, gdbarch);
10698
10699 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10700
10701 /* Add some default predicates. */
10702 if (is_m)
10703 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10704 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10705 dwarf2_append_unwinders (gdbarch);
10706 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10707 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10708 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10709
10710 /* Now we have tuned the configuration, set a few final things,
10711 based on what the OS ABI has told us. */
10712
10713 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10714 binaries are always marked. */
10715 if (tdep->arm_abi == ARM_ABI_AUTO)
10716 tdep->arm_abi = ARM_ABI_APCS;
10717
10718 /* Watchpoints are not steppable. */
10719 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10720
10721 /* We used to default to FPA for generic ARM, but almost nobody
10722 uses that now, and we now provide a way for the user to force
10723 the model. So default to the most useful variant. */
10724 if (tdep->fp_model == ARM_FLOAT_AUTO)
10725 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10726
10727 if (tdep->jb_pc >= 0)
10728 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10729
10730 /* Floating point sizes and format. */
10731 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10732 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10733 {
10734 set_gdbarch_double_format
10735 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10736 set_gdbarch_long_double_format
10737 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10738 }
10739 else
10740 {
10741 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10742 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10743 }
10744
10745 /* Hook used to decorate frames with signed return addresses, only available
10746 for ARMv8.1-m PACBTI. */
10747 if (is_m && have_pacbti)
10748 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10749
10750 if (tdesc_data != nullptr)
10751 {
10752 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10753
10754 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10755 register_count = gdbarch_num_regs (gdbarch);
10756
10757 /* Override tdesc_register_type to adjust the types of VFP
10758 registers for NEON. */
10759 set_gdbarch_register_type (gdbarch, arm_register_type);
10760 }
10761
10762 /* Initialize the pseudo register data. */
10763 int num_pseudos = 0;
10764 if (tdep->have_s_pseudos)
10765 {
10766 /* VFP single precision pseudo registers (S0~S31). */
10767 tdep->s_pseudo_base = register_count;
10768 tdep->s_pseudo_count = 32;
10769 num_pseudos += tdep->s_pseudo_count;
10770
10771 if (tdep->have_q_pseudos)
10772 {
10773 /* NEON quad precision pseudo registers (Q0~Q15). */
10774 tdep->q_pseudo_base = register_count + num_pseudos;
10775
10776 if (have_neon)
10777 tdep->q_pseudo_count = 16;
10778 else if (have_mve)
10779 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10780
10781 num_pseudos += tdep->q_pseudo_count;
10782 }
10783 }
10784
10785 /* Do we have any MVE pseudo registers? */
10786 if (have_mve)
10787 {
10788 tdep->mve_pseudo_base = register_count + num_pseudos;
10789 tdep->mve_pseudo_count = 1;
10790 num_pseudos += tdep->mve_pseudo_count;
10791 }
10792
10793   /* Do we have any ARMv8.1-m PACBTI pseudo registers?  */
10794 if (have_pacbti)
10795 {
10796 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10797 tdep->pacbti_pseudo_count = 1;
10798 num_pseudos += tdep->pacbti_pseudo_count;
10799 }
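
  /* At this point any pseudo registers are numbered consecutively after the
     REGISTER_COUNT raw registers, in the order counted above: S0-S31 first
     (when present), then the Q registers, then the single MVE pseudo, then
     the single PACBTI pseudo.  The *_pseudo_base fields record where each
     group starts.  */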
10800
10801 /* Set some pseudo register hooks, if we have pseudo registers. */
10802 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10803 {
10804 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10805 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10806 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10807 }
10808
10809 /* Add standard register aliases. We add aliases even for those
10810 names which are used by the current architecture - it's simpler,
10811 and does no harm, since nothing ever lists user registers. */
10812 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10813 user_reg_add (gdbarch, arm_register_aliases[i].name,
10814 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10815
10816 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10817 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10818
10819 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10820
10821 return gdbarch;
10822 }
10823
10824 static void
10825 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10826 {
10827 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10828
10829 if (tdep == NULL)
10830 return;
10831
10832 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10833 (int) tdep->fp_model);
10834 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10835 (int) tdep->have_fpa_registers);
10836 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10837 (int) tdep->have_wmmx_registers);
10838 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10839 (int) tdep->vfp_register_count);
10840 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10841 tdep->have_s_pseudos? "true" : "false");
10842 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10843 (int) tdep->s_pseudo_base);
10844 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10845 (int) tdep->s_pseudo_count);
10846 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10847 tdep->have_q_pseudos? "true" : "false");
10848 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10849 (int) tdep->q_pseudo_base);
10850 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10851 (int) tdep->q_pseudo_count);
10852 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10853 (int) tdep->have_neon);
10854 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10855 tdep->have_mve? "yes" : "no");
10856 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10857 tdep->mve_vpr_regnum);
10858 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10859 tdep->mve_pseudo_base);
10860 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10861 tdep->mve_pseudo_count);
10862 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10863 tdep->m_profile_msp_regnum);
10864 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10865 tdep->m_profile_psp_regnum);
10866 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10867 tdep->m_profile_msp_ns_regnum);
10868 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10869 tdep->m_profile_psp_ns_regnum);
10870 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10871 tdep->m_profile_msp_s_regnum);
10872 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10873 tdep->m_profile_psp_s_regnum);
10874 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10875 (unsigned long) tdep->lowest_pc);
10876 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10877 tdep->have_pacbti? "yes" : "no");
10878 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10879 tdep->pacbti_pseudo_base);
10880 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10881 tdep->pacbti_pseudo_count);
10882 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10883 tdep->is_m? "yes" : "no");
10884 }
10885
10886 #if GDB_SELF_TEST
10887 namespace selftests
10888 {
10889 static void arm_record_test (void);
10890 static void arm_analyze_prologue_test ();
10891 }
10892 #endif
10893
10894 void _initialize_arm_tdep ();
10895 void
10896 _initialize_arm_tdep ()
10897 {
10898 long length;
10899 int i, j;
10900 char regdesc[1024], *rdptr = regdesc;
10901 size_t rest = sizeof (regdesc);
10902
10903 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10904
10905 /* Add ourselves to objfile event chain. */
10906 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10907
10908 /* Register an ELF OS ABI sniffer for ARM binaries. */
10909 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10910 bfd_target_elf_flavour,
10911 arm_elf_osabi_sniffer);
10912
10913 /* Add root prefix command for all "set arm"/"show arm" commands. */
10914 add_setshow_prefix_cmd ("arm", no_class,
10915 _("Various ARM-specific commands."),
10916 _("Various ARM-specific commands."),
10917 &setarmcmdlist, &showarmcmdlist,
10918 &setlist, &showlist);
10919
10920 arm_disassembler_options = xstrdup ("reg-names-std");
10921 const disasm_options_t *disasm_options
10922 = &disassembler_options_arm ()->options;
10923 int num_disassembly_styles = 0;
10924 for (i = 0; disasm_options->name[i] != NULL; i++)
10925 if (startswith (disasm_options->name[i], "reg-names-"))
10926 num_disassembly_styles++;
10927
10928 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
10929 valid_disassembly_styles = XNEWVEC (const char *,
10930 num_disassembly_styles + 1);
10931 for (i = j = 0; disasm_options->name[i] != NULL; i++)
10932 if (startswith (disasm_options->name[i], "reg-names-"))
10933 {
10934 size_t offset = strlen ("reg-names-");
10935 const char *style = disasm_options->name[i];
10936 valid_disassembly_styles[j++] = &style[offset];
10937 if (strcmp (&style[offset], "std") == 0)
10938 disassembly_style = &style[offset];
10939 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
10940 disasm_options->description[i]);
10941 rdptr += length;
10942 rest -= length;
10943 }
10944 /* Mark the end of valid options. */
10945 valid_disassembly_styles[num_disassembly_styles] = NULL;
10946
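  /* Each entry appended to REGDESC above has the form
     "<style> - <description>\n"; the accumulated text becomes part of the
     "set arm disassembler" help string built below.  */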
10947 /* Create the help text. */
10948 std::string helptext = string_printf ("%s%s%s",
10949 _("The valid values are:\n"),
10950 regdesc,
10951 _("The default is \"std\"."));
10952
10953 add_setshow_enum_cmd("disassembler", no_class,
10954 valid_disassembly_styles, &disassembly_style,
10955 _("Set the disassembly style."),
10956 _("Show the disassembly style."),
10957 helptext.c_str (),
10958 set_disassembly_style_sfunc,
10959 show_disassembly_style_sfunc,
10960 &setarmcmdlist, &showarmcmdlist);
10961
10962 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10963 _("Set usage of ARM 32-bit mode."),
10964 _("Show usage of ARM 32-bit mode."),
10965 _("When off, a 26-bit PC will be used."),
10966 NULL,
10967 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10968 mode is %s. */
10969 &setarmcmdlist, &showarmcmdlist);
10970
10971 /* Add a command to allow the user to force the FPU model. */
10972 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10973 _("Set the floating point type."),
10974 _("Show the floating point type."),
10975 			_("auto - Determine the FP type from the OS-ABI.\n\
10976 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10977 fpa - FPA co-processor (GCC compiled).\n\
10978 softvfp - Software FP with pure-endian doubles.\n\
10979 vfp - VFP co-processor."),
10980 set_fp_model_sfunc, show_fp_model,
10981 &setarmcmdlist, &showarmcmdlist);
10982
10983 /* Add a command to allow the user to force the ABI. */
10984 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10985 _("Set the ABI."),
10986 _("Show the ABI."),
10987 NULL, arm_set_abi, arm_show_abi,
10988 &setarmcmdlist, &showarmcmdlist);
10989
10990 /* Add two commands to allow the user to force the assumed
10991 execution mode. */
10992 add_setshow_enum_cmd ("fallback-mode", class_support,
10993 arm_mode_strings, &arm_fallback_mode_string,
10994 _("Set the mode assumed when symbols are unavailable."),
10995 _("Show the mode assumed when symbols are unavailable."),
10996 NULL, NULL, arm_show_fallback_mode,
10997 &setarmcmdlist, &showarmcmdlist);
10998 add_setshow_enum_cmd ("force-mode", class_support,
10999 arm_mode_strings, &arm_force_mode_string,
11000 _("Set the mode assumed even when symbols are available."),
11001 _("Show the mode assumed even when symbols are available."),
11002 NULL, NULL, arm_show_force_mode,
11003 &setarmcmdlist, &showarmcmdlist);
11004
11005 /* Add a command to stop triggering security exceptions when
11006 unwinding exception stacks. */
11007 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
11008 _("Set usage of non-secure to secure exception stack unwinding."),
11009 _("Show usage of non-secure to secure exception stack unwinding."),
11010 _("When on, the debugger can trigger memory access traps."),
11011 NULL, arm_show_unwind_secure_frames,
11012 &setarmcmdlist, &showarmcmdlist);
11013
11014 /* Debugging flag. */
11015 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
11016 _("Set ARM debugging."),
11017 _("Show ARM debugging."),
11018 _("When on, arm-specific debugging is enabled."),
11019 NULL,
11020 			   NULL, /* FIXME: i18n: "ARM debugging is %s".  */
11021 &setdebuglist, &showdebuglist);
11022
11023 #if GDB_SELF_TEST
11024 selftests::register_test ("arm-record", selftests::arm_record_test);
11025 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
11026 #endif
11027
11028 }
11029
11030 /* ARM-reversible process record data structures. */
11031
11032 #define ARM_INSN_SIZE_BYTES 4
11033 #define THUMB_INSN_SIZE_BYTES 2
11034 #define THUMB2_INSN_SIZE_BYTES 4
11035
11036
11037 /* Position of the bit within a 32-bit ARM instruction
11038 that defines whether the instruction is a load or store. */
11039 #define INSN_S_L_BIT_NUM 20
11040
11041 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
11042 do \
11043 { \
11044 unsigned int reg_len = LENGTH; \
11045 if (reg_len) \
11046 { \
11047 REGS = XNEWVEC (uint32_t, reg_len); \
11048 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
11049 } \
11050 } \
11051 while (0)
11052
11053 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
11054 do \
11055 { \
11056 unsigned int mem_len = LENGTH; \
11057 if (mem_len) \
11058 { \
11059 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
11060 memcpy(&MEMS->len, &RECORD_BUF[0], \
11061 sizeof(struct arm_mem_r) * LENGTH); \
11062 } \
11063 } \
11064 while (0)
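
/* The record routines below collect, in a scratch RECORD_BUF, the numbers of
   the registers an insn modifies and, in a scratch RECORD_BUF_MEM,
   (length, address) pairs describing the memory it modifies, set
   reg_rec_count and mem_rec_count accordingly, and then use REG_ALLOC and
   MEM_ALLOC to copy the scratch buffers into the heap-allocated arm_regs and
   arm_mems lists of the decode record, e.g.:

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
		record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
		record_buf_mem);

   The (length, address) pair layout of RECORD_BUF_MEM matches struct
   arm_mem_r below.  */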
11065
11066 /* Non-zero if the insn already has register or memory records attached,
      i.e. it has already been recorded (boolean expression).  */
11067 #define INSN_RECORDED(ARM_RECORD) \
11068 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
11069
11070 /* ARM memory record structure. */
11071 struct arm_mem_r
11072 {
11073 uint32_t len; /* Record length. */
11074 uint32_t addr; /* Memory address. */
11075 };
11076
11077 /* ARM instruction record contains opcode of current insn
11078 and execution state (before entry to decode_insn()),
11079 contains list of to-be-modified registers and
11080 memory blocks (on return from decode_insn()). */
11081
11082 struct arm_insn_decode_record
11083 {
11084 struct gdbarch *gdbarch;
11085 struct regcache *regcache;
11086 CORE_ADDR this_addr; /* Address of the insn being decoded. */
11087 uint32_t arm_insn; /* Should accommodate thumb. */
11088 uint32_t cond; /* Condition code. */
11089 uint32_t opcode; /* Insn opcode. */
11090 uint32_t decode; /* Insn decode bits. */
11091 uint32_t mem_rec_count; /* No of mem records. */
11092 uint32_t reg_rec_count; /* No of reg records. */
11093 uint32_t *arm_regs; /* Registers to be saved for this record. */
11094 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
11095 };
11096
11097
11098 /* Checks ARM SBZ and SBO mandatory fields. */
11099
11100 static int
11101 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
11102 {
11103   uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
11104
11105 if (!len)
11106 return 1;
11107
11108 if (!sbo)
11109 ones = ~ones;
11110
11111 while (ones)
11112 {
11113 if (!(ones & sbo))
11114 {
11115 return 0;
11116 }
11117 ones = ones >> 1;
11118 }
11119 return 1;
11120 }
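
/* For example, sbo_sbz (insn, 9, 4, 1) inspects the 4-bit field at bits 8..11
   of INSN (BIT_NUM is 1-based) and is intended to check the SBO ("should be
   one") constraint; passing 0 as the last argument selects the SBZ ("should
   be zero") check instead.  */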
11121
11122 enum arm_record_result
11123 {
11124 ARM_RECORD_SUCCESS = 0,
11125 ARM_RECORD_FAILURE = 1
11126 };
11127
11128 enum arm_record_strx_t
11129 {
11130 ARM_RECORD_STRH=1,
11131 ARM_RECORD_STRD
11132 };
11133
11134 enum record_type_t
11135 {
11136 ARM_RECORD=1,
11137 THUMB_RECORD,
11138 THUMB2_RECORD
11139 };
11140
11141
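/* Record the side effects of the miscellaneous store insns STRH and STRD
   (selected by STR_TYPE) for the addressing modes decoded below: the
   (length, address) pairs of the stored bytes go into RECORD_BUF_MEM, and
   for the pre/post-indexed forms the updated base register Rn is also noted
   in RECORD_BUF.  Returns 0 on success.  */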
11142 static int
11143 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
11144 uint32_t *record_buf_mem, arm_record_strx_t str_type)
11145 {
11146
11147 struct regcache *reg_cache = arm_insn_r->regcache;
11148 ULONGEST u_regval[2]= {0};
11149
11150 uint32_t reg_src1 = 0, reg_src2 = 0;
11151   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11152
11153 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11154 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11155
11156 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11157 {
11158 /* 1) Handle misc store, immediate offset. */
11159 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11160 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11161 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11162 regcache_raw_read_unsigned (reg_cache, reg_src1,
11163 &u_regval[0]);
11164 if (ARM_PC_REGNUM == reg_src1)
11165 {
11166 	  /* If R15 (PC) was used as Rn, the value read is the current PC + 8.  */
11167 u_regval[0] = u_regval[0] + 8;
11168 }
11169 offset_8 = (immed_high << 4) | immed_low;
11170 /* Calculate target store address. */
11171 if (14 == arm_insn_r->opcode)
11172 {
11173 tgt_mem_addr = u_regval[0] + offset_8;
11174 }
11175 else
11176 {
11177 tgt_mem_addr = u_regval[0] - offset_8;
11178 }
11179 if (ARM_RECORD_STRH == str_type)
11180 {
11181 record_buf_mem[0] = 2;
11182 record_buf_mem[1] = tgt_mem_addr;
11183 arm_insn_r->mem_rec_count = 1;
11184 }
11185 else if (ARM_RECORD_STRD == str_type)
11186 {
11187 record_buf_mem[0] = 4;
11188 record_buf_mem[1] = tgt_mem_addr;
11189 record_buf_mem[2] = 4;
11190 record_buf_mem[3] = tgt_mem_addr + 4;
11191 arm_insn_r->mem_rec_count = 2;
11192 }
11193 }
11194 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
11195 {
11196 /* 2) Store, register offset. */
11197 /* Get Rm. */
11198 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11199 /* Get Rn. */
11200 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11201 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11202 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11203       if (15 == reg_src2)
11204 	{
11205 	  /* If R15 (PC) was used as Rn, the value read is the current PC + 8.  */
11206 	  u_regval[1] = u_regval[1] + 8;
11207 	}
11208 /* Calculate target store address, Rn +/- Rm, register offset. */
11209 if (12 == arm_insn_r->opcode)
11210 {
11211 tgt_mem_addr = u_regval[0] + u_regval[1];
11212 }
11213 else
11214 {
11215 tgt_mem_addr = u_regval[1] - u_regval[0];
11216 }
11217 if (ARM_RECORD_STRH == str_type)
11218 {
11219 record_buf_mem[0] = 2;
11220 record_buf_mem[1] = tgt_mem_addr;
11221 arm_insn_r->mem_rec_count = 1;
11222 }
11223 else if (ARM_RECORD_STRD == str_type)
11224 {
11225 record_buf_mem[0] = 4;
11226 record_buf_mem[1] = tgt_mem_addr;
11227 record_buf_mem[2] = 4;
11228 record_buf_mem[3] = tgt_mem_addr + 4;
11229 arm_insn_r->mem_rec_count = 2;
11230 }
11231 }
11232 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11233 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11234 {
11235 /* 3) Store, immediate pre-indexed. */
11236 /* 5) Store, immediate post-indexed. */
11237 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11238 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11239 offset_8 = (immed_high << 4) | immed_low;
11240 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11241 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11242       /* Calculate target store address, Rn +/- offset_8, immediate offset.  */
11243 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11244 {
11245 tgt_mem_addr = u_regval[0] + offset_8;
11246 }
11247 else
11248 {
11249 tgt_mem_addr = u_regval[0] - offset_8;
11250 }
11251 if (ARM_RECORD_STRH == str_type)
11252 {
11253 record_buf_mem[0] = 2;
11254 record_buf_mem[1] = tgt_mem_addr;
11255 arm_insn_r->mem_rec_count = 1;
11256 }
11257 else if (ARM_RECORD_STRD == str_type)
11258 {
11259 record_buf_mem[0] = 4;
11260 record_buf_mem[1] = tgt_mem_addr;
11261 record_buf_mem[2] = 4;
11262 record_buf_mem[3] = tgt_mem_addr + 4;
11263 arm_insn_r->mem_rec_count = 2;
11264 }
11265 /* Record Rn also as it changes. */
11266 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11267 arm_insn_r->reg_rec_count = 1;
11268 }
11269 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11270 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11271 {
11272 /* 4) Store, register pre-indexed. */
11273       /* 6) Store, register post-indexed.  */
11274 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11275 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11276 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11277 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11278 /* Calculate target store address, Rn +/- Rm, register offset. */
11279 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11280 {
11281 tgt_mem_addr = u_regval[0] + u_regval[1];
11282 }
11283 else
11284 {
11285 tgt_mem_addr = u_regval[1] - u_regval[0];
11286 }
11287 if (ARM_RECORD_STRH == str_type)
11288 {
11289 record_buf_mem[0] = 2;
11290 record_buf_mem[1] = tgt_mem_addr;
11291 arm_insn_r->mem_rec_count = 1;
11292 }
11293 else if (ARM_RECORD_STRD == str_type)
11294 {
11295 record_buf_mem[0] = 4;
11296 record_buf_mem[1] = tgt_mem_addr;
11297 record_buf_mem[2] = 4;
11298 record_buf_mem[3] = tgt_mem_addr + 4;
11299 arm_insn_r->mem_rec_count = 2;
11300 }
11301 /* Record Rn also as it changes. */
11302 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11303 arm_insn_r->reg_rec_count = 1;
11304 }
11305 return 0;
11306 }
11307
11308 /* Handling ARM extension space insns. */
11309
11310 static int
11311 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11312 {
11313   int ret = 0;                       /* Return value: -1: record failure; 0: success.  */
11314 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11315 uint32_t record_buf[8], record_buf_mem[8];
11316 uint32_t reg_src1 = 0;
11317 struct regcache *reg_cache = arm_insn_r->regcache;
11318 ULONGEST u_regval = 0;
11319
11320 gdb_assert (!INSN_RECORDED(arm_insn_r));
11321 /* Handle unconditional insn extension space. */
11322
11323 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11324 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11325 if (arm_insn_r->cond)
11326 {
11327       /* PLD has no effect on the architectural state; it only affects
11328 	 the caches.  */
11329 if (5 == ((opcode1 & 0xE0) >> 5))
11330 {
11331 /* BLX(1) */
11332 record_buf[0] = ARM_PS_REGNUM;
11333 record_buf[1] = ARM_LR_REGNUM;
11334 arm_insn_r->reg_rec_count = 2;
11335 }
11336 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11337 }
11338
11339
11340 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11341 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11342 {
11343 ret = -1;
11344 /* Undefined instruction on ARM V5; need to handle if later
11345 versions define it. */
11346 }
11347
11348 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11349 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11350 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11351
11352 /* Handle arithmetic insn extension space. */
11353 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11354 && !INSN_RECORDED(arm_insn_r))
11355 {
11356 /* Handle MLA(S) and MUL(S). */
11357 if (in_inclusive_range (insn_op1, 0U, 3U))
11358 {
11359 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11360 record_buf[1] = ARM_PS_REGNUM;
11361 arm_insn_r->reg_rec_count = 2;
11362 }
11363 else if (in_inclusive_range (insn_op1, 4U, 15U))
11364 {
11365 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11366 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11367 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11368 record_buf[2] = ARM_PS_REGNUM;
11369 arm_insn_r->reg_rec_count = 3;
11370 }
11371 }
11372
11373 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11374 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11375 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11376
11377 /* Handle control insn extension space. */
11378
11379 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11380 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11381 {
11382 if (!bit (arm_insn_r->arm_insn,25))
11383 {
11384 if (!bits (arm_insn_r->arm_insn, 4, 7))
11385 {
11386 if ((0 == insn_op1) || (2 == insn_op1))
11387 {
11388 /* MRS. */
11389 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11390 arm_insn_r->reg_rec_count = 1;
11391 }
11392 else if (1 == insn_op1)
11393 {
11394 		  /* CPSR is going to be changed.  */
11395 record_buf[0] = ARM_PS_REGNUM;
11396 arm_insn_r->reg_rec_count = 1;
11397 }
11398 else if (3 == insn_op1)
11399 {
11400 /* SPSR is going to be changed. */
11401 /* We need to get SPSR value, which is yet to be done. */
11402 return -1;
11403 }
11404 }
11405 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11406 {
11407 if (1 == insn_op1)
11408 {
11409 /* BX. */
11410 record_buf[0] = ARM_PS_REGNUM;
11411 arm_insn_r->reg_rec_count = 1;
11412 }
11413 else if (3 == insn_op1)
11414 {
11415 /* CLZ. */
11416 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11417 arm_insn_r->reg_rec_count = 1;
11418 }
11419 }
11420 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11421 {
11422 /* BLX. */
11423 record_buf[0] = ARM_PS_REGNUM;
11424 record_buf[1] = ARM_LR_REGNUM;
11425 arm_insn_r->reg_rec_count = 2;
11426 }
11427 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11428 {
11429 /* QADD, QSUB, QDADD, QDSUB */
11430 record_buf[0] = ARM_PS_REGNUM;
11431 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11432 arm_insn_r->reg_rec_count = 2;
11433 }
11434 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11435 {
11436 /* BKPT. */
11437 record_buf[0] = ARM_PS_REGNUM;
11438 record_buf[1] = ARM_LR_REGNUM;
11439 arm_insn_r->reg_rec_count = 2;
11440
11441 	      /* Save SPSR also; how?  */
11442 return -1;
11443 }
11444 	  else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11445 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11446 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11447 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11448 )
11449 {
11450 if (0 == insn_op1 || 1 == insn_op1)
11451 {
11452 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11453 		  /* We don't optimize the SMULW<y> case, where only
11454 		     Rd would be needed.  */
11455 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11456 record_buf[1] = ARM_PS_REGNUM;
11457 arm_insn_r->reg_rec_count = 2;
11458 }
11459 else if (2 == insn_op1)
11460 {
11461 /* SMLAL<x><y>. */
11462 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11463 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11464 arm_insn_r->reg_rec_count = 2;
11465 }
11466 else if (3 == insn_op1)
11467 {
11468 /* SMUL<x><y>. */
11469 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11470 arm_insn_r->reg_rec_count = 1;
11471 }
11472 }
11473 }
11474 else
11475 {
11476 /* MSR : immediate form. */
11477 if (1 == insn_op1)
11478 {
11479 	      /* CPSR is going to be changed.  */
11480 record_buf[0] = ARM_PS_REGNUM;
11481 arm_insn_r->reg_rec_count = 1;
11482 }
11483 else if (3 == insn_op1)
11484 {
11485 /* SPSR is going to be changed. */
11486 	      /* We need to get the SPSR value, which is yet to be done.  */
11487 return -1;
11488 }
11489 }
11490 }
11491
11492 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11493 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11494 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11495
11496 /* Handle load/store insn extension space. */
11497
11498 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11499 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11500 && !INSN_RECORDED(arm_insn_r))
11501 {
11502 /* SWP/SWPB. */
11503 if (0 == insn_op1)
11504 {
11505 	  /* These insns change both a register and memory.  */
11506 	  /* SWP or SWPB insn.  */
11507 /* Get memory address given by Rn. */
11508 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11509 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11510 	  /* Is it SWP?  SWP swaps a word.  */
11511 if (8 == arm_insn_r->opcode)
11512 {
11513 record_buf_mem[0] = 4;
11514 }
11515 else
11516 {
11517 	      /* SWPB insn swaps only a byte.  */
11518 record_buf_mem[0] = 1;
11519 }
11520 record_buf_mem[1] = u_regval;
11521 arm_insn_r->mem_rec_count = 1;
11522 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11523 arm_insn_r->reg_rec_count = 1;
11524 }
11525 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11526 {
11527 /* STRH. */
11528 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11529 ARM_RECORD_STRH);
11530 }
11531 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11532 {
11533 /* LDRD. */
11534 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11535 record_buf[1] = record_buf[0] + 1;
11536 arm_insn_r->reg_rec_count = 2;
11537 }
11538 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11539 {
11540 /* STRD. */
11541 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11542 ARM_RECORD_STRD);
11543 }
11544 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11545 {
11546 /* LDRH, LDRSB, LDRSH. */
11547 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11548 arm_insn_r->reg_rec_count = 1;
11549 }
11550
11551 }
11552
11553 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11554 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11555 && !INSN_RECORDED(arm_insn_r))
11556 {
11557 ret = -1;
11558 /* Handle coprocessor insn extension space. */
11559 }
11560
11561 /* To be done for ARMv5 and later; as of now we return -1. */
11562 if (-1 == ret)
11563 return ret;
11564
11565 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11566 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11567
11568 return ret;
11569 }
11570
11571 /* Handling opcode 000 insns. */
11572
11573 static int
11574 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11575 {
11576 struct regcache *reg_cache = arm_insn_r->regcache;
11577 uint32_t record_buf[8], record_buf_mem[8];
11578 ULONGEST u_regval[2] = {0};
11579
11580 uint32_t reg_src1 = 0;
11581 uint32_t opcode1 = 0;
11582
11583 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11584 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11585 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11586
11587 if (!((opcode1 & 0x19) == 0x10))
11588 {
11589       /* Data-processing (register) and data-processing (register-shifted
11590 	 register).  */
11591       /* In all of the shifter-operand addressing modes the insn modifies
11592 	 the destination register, which is specified by bits 12-15.  */
11593 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11594 record_buf[1] = ARM_PS_REGNUM;
11595 arm_insn_r->reg_rec_count = 2;
11596 }
11597 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11598 {
11599 /* Miscellaneous instructions */
11600
11601 if (3 == arm_insn_r->decode && 0x12 == opcode1
11602 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11603 {
11604 /* Handle BLX, branch and link/exchange. */
11605 if (9 == arm_insn_r->opcode)
11606 {
11607 	      /* The branch state (ARM/Thumb) is chosen by copying bit[0] of Rm
11608 		 into the T bit of CPSR, and R14 receives the return address.  */
11609 record_buf[0] = ARM_PS_REGNUM;
11610 record_buf[1] = ARM_LR_REGNUM;
11611 arm_insn_r->reg_rec_count = 2;
11612 }
11613 }
11614 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11615 {
11616 /* Handle enhanced software breakpoint insn, BKPT. */
11617 	  /* CPSR is changed so that execution continues in ARM state, with
11618 	     normal interrupts disabled, in Abort mode.  */
11619 	  /* The PC is set according to the high-vector configuration.  */
11620 	  /* If the user hit this breakpoint and then runs in reverse, we
11621 	     need to go back with the previous CPSR and program counter,
11622 	     hence record them.  */
11623 record_buf[0] = ARM_PS_REGNUM;
11624 record_buf[1] = ARM_LR_REGNUM;
11625 arm_insn_r->reg_rec_count = 2;
11626
11627 /* Save SPSR also; how? */
11628 return -1;
11629 }
11630 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11631 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11632 {
11633 /* Handle BX, branch and link/exchange. */
11634 	  /* Bit[0] of Rm selects the T bit of CPSR, i.e. the branch state.  */
11635 record_buf[0] = ARM_PS_REGNUM;
11636 arm_insn_r->reg_rec_count = 1;
11637 }
11638 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11639 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11640 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11641 {
11642 /* Count leading zeros: CLZ. */
11643 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11644 arm_insn_r->reg_rec_count = 1;
11645 }
11646 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11647 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11648 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11649 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11650 {
11651 /* Handle MRS insn. */
11652 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11653 arm_insn_r->reg_rec_count = 1;
11654 }
11655 }
11656 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11657 {
11658 /* Multiply and multiply-accumulate */
11659
11660 /* Handle multiply instructions. */
11661 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11662 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11663 {
11664 /* Handle MLA and MUL. */
11665 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11666 record_buf[1] = ARM_PS_REGNUM;
11667 arm_insn_r->reg_rec_count = 2;
11668 }
11669 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11670 {
11671 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11672 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11673 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11674 record_buf[2] = ARM_PS_REGNUM;
11675 arm_insn_r->reg_rec_count = 3;
11676 }
11677 }
11678 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11679 {
11680 /* Synchronization primitives */
11681
11682       /* Handle SWP and SWPB.  */
11683       /* These insns change both a register and memory, so record the
11684 	 destination register and the addressed memory.  */
11685
11686 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11687 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11688       /* Is it SWP?  SWP swaps a word.  */
11689 if (8 == arm_insn_r->opcode)
11690 {
11691 record_buf_mem[0] = 4;
11692 }
11693 else
11694 {
11695 	  /* SWPB insn swaps only a byte.  */
11696 record_buf_mem[0] = 1;
11697 }
11698 record_buf_mem[1] = u_regval[0];
11699 arm_insn_r->mem_rec_count = 1;
11700 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11701 arm_insn_r->reg_rec_count = 1;
11702 }
11703 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11704 || 15 == arm_insn_r->decode)
11705 {
11706 if ((opcode1 & 0x12) == 2)
11707 {
11708 /* Extra load/store (unprivileged) */
11709 return -1;
11710 }
11711 else
11712 {
11713 /* Extra load/store */
11714 switch (bits (arm_insn_r->arm_insn, 5, 6))
11715 {
11716 case 1:
11717 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11718 {
11719 /* STRH (register), STRH (immediate) */
11720 arm_record_strx (arm_insn_r, &record_buf[0],
11721 &record_buf_mem[0], ARM_RECORD_STRH);
11722 }
11723 else if ((opcode1 & 0x05) == 0x1)
11724 {
11725 /* LDRH (register) */
11726 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11727 arm_insn_r->reg_rec_count = 1;
11728
11729 if (bit (arm_insn_r->arm_insn, 21))
11730 {
11731 /* Write back to Rn. */
11732 record_buf[arm_insn_r->reg_rec_count++]
11733 = bits (arm_insn_r->arm_insn, 16, 19);
11734 }
11735 }
11736 else if ((opcode1 & 0x05) == 0x5)
11737 {
11738 /* LDRH (immediate), LDRH (literal) */
11739 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11740
11741 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11742 arm_insn_r->reg_rec_count = 1;
11743
11744 if (rn != 15)
11745 {
11746 		    /* LDRH (immediate).  */
11747 if (bit (arm_insn_r->arm_insn, 21))
11748 {
11749 /* Write back to Rn. */
11750 record_buf[arm_insn_r->reg_rec_count++] = rn;
11751 }
11752 }
11753 }
11754 else
11755 return -1;
11756 break;
11757 case 2:
11758 if ((opcode1 & 0x05) == 0x0)
11759 {
11760 /* LDRD (register) */
11761 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11762 record_buf[1] = record_buf[0] + 1;
11763 arm_insn_r->reg_rec_count = 2;
11764
11765 if (bit (arm_insn_r->arm_insn, 21))
11766 {
11767 /* Write back to Rn. */
11768 record_buf[arm_insn_r->reg_rec_count++]
11769 = bits (arm_insn_r->arm_insn, 16, 19);
11770 }
11771 }
11772 else if ((opcode1 & 0x05) == 0x1)
11773 {
11774 /* LDRSB (register) */
11775 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11776 arm_insn_r->reg_rec_count = 1;
11777
11778 if (bit (arm_insn_r->arm_insn, 21))
11779 {
11780 /* Write back to Rn. */
11781 record_buf[arm_insn_r->reg_rec_count++]
11782 = bits (arm_insn_r->arm_insn, 16, 19);
11783 }
11784 }
11785 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11786 {
11787 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11788 LDRSB (literal) */
11789 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11790
11791 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11792 arm_insn_r->reg_rec_count = 1;
11793
11794 if (rn != 15)
11795 {
11796 		    /* LDRD (immediate), LDRSB (immediate).  */
11797 if (bit (arm_insn_r->arm_insn, 21))
11798 {
11799 /* Write back to Rn. */
11800 record_buf[arm_insn_r->reg_rec_count++] = rn;
11801 }
11802 }
11803 }
11804 else
11805 return -1;
11806 break;
11807 case 3:
11808 if ((opcode1 & 0x05) == 0x0)
11809 {
11810 /* STRD (register) */
11811 arm_record_strx (arm_insn_r, &record_buf[0],
11812 &record_buf_mem[0], ARM_RECORD_STRD);
11813 }
11814 else if ((opcode1 & 0x05) == 0x1)
11815 {
11816 /* LDRSH (register) */
11817 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11818 arm_insn_r->reg_rec_count = 1;
11819
11820 if (bit (arm_insn_r->arm_insn, 21))
11821 {
11822 /* Write back to Rn. */
11823 record_buf[arm_insn_r->reg_rec_count++]
11824 = bits (arm_insn_r->arm_insn, 16, 19);
11825 }
11826 }
11827 else if ((opcode1 & 0x05) == 0x4)
11828 {
11829 /* STRD (immediate) */
11830 arm_record_strx (arm_insn_r, &record_buf[0],
11831 &record_buf_mem[0], ARM_RECORD_STRD);
11832 }
11833 else if ((opcode1 & 0x05) == 0x5)
11834 {
11835 /* LDRSH (immediate), LDRSH (literal) */
11836 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11837 arm_insn_r->reg_rec_count = 1;
11838
11839 if (bit (arm_insn_r->arm_insn, 21))
11840 {
11841 /* Write back to Rn. */
11842 record_buf[arm_insn_r->reg_rec_count++]
11843 = bits (arm_insn_r->arm_insn, 16, 19);
11844 }
11845 }
11846 else
11847 return -1;
11848 break;
11849 default:
11850 return -1;
11851 }
11852 }
11853 }
11854 else
11855 {
11856 return -1;
11857 }
11858
11859 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11860 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11861 return 0;
11862 }
11863
11864 /* Handling opcode 001 insns. */
11865
11866 static int
11867 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11868 {
11869 uint32_t record_buf[8], record_buf_mem[8];
11870
11871 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11872 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11873
11874 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11875 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11876 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11877 )
11878 {
11879 /* Handle MSR insn. */
11880 if (9 == arm_insn_r->opcode)
11881 {
11882 	  /* CPSR is going to be changed.  */
11883 record_buf[0] = ARM_PS_REGNUM;
11884 arm_insn_r->reg_rec_count = 1;
11885 }
11886 else
11887 {
11888 /* SPSR is going to be changed. */
11889 }
11890 }
11891 else if (arm_insn_r->opcode <= 15)
11892 {
11893 /* Normal data processing insns. */
11894       /* In all of the shifter-operand addressing modes the insn modifies
11895 	 the destination register, which is specified by bits 12-15.  */
11896 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11897 record_buf[1] = ARM_PS_REGNUM;
11898 arm_insn_r->reg_rec_count = 2;
11899 }
11900 else
11901 {
11902 return -1;
11903 }
11904
11905 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11906 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11907 return 0;
11908 }
11909
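/* Record handler for ARM media instructions; reached from the opcode 011
   handler below when bit 4 of the insn is set.  */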
11910 static int
11911 arm_record_media (arm_insn_decode_record *arm_insn_r)
11912 {
11913 uint32_t record_buf[8];
11914
11915 switch (bits (arm_insn_r->arm_insn, 22, 24))
11916 {
11917 case 0:
11918 /* Parallel addition and subtraction, signed */
11919 case 1:
11920 /* Parallel addition and subtraction, unsigned */
11921 case 2:
11922 case 3:
11923 /* Packing, unpacking, saturation and reversal */
11924 {
11925 int rd = bits (arm_insn_r->arm_insn, 12, 15);
11926
11927 record_buf[arm_insn_r->reg_rec_count++] = rd;
11928 }
11929 break;
11930
11931 case 4:
11932 case 5:
11933 /* Signed multiplies */
11934 {
11935 int rd = bits (arm_insn_r->arm_insn, 16, 19);
11936 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
11937
11938 record_buf[arm_insn_r->reg_rec_count++] = rd;
11939 if (op1 == 0x0)
11940 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11941 else if (op1 == 0x4)
11942 record_buf[arm_insn_r->reg_rec_count++]
11943 = bits (arm_insn_r->arm_insn, 12, 15);
11944 }
11945 break;
11946
11947 case 6:
11948 {
11949 if (bit (arm_insn_r->arm_insn, 21)
11950 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
11951 {
11952 /* SBFX */
11953 record_buf[arm_insn_r->reg_rec_count++]
11954 = bits (arm_insn_r->arm_insn, 12, 15);
11955 }
11956 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
11957 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
11958 {
11959 /* USAD8 and USADA8 */
11960 record_buf[arm_insn_r->reg_rec_count++]
11961 = bits (arm_insn_r->arm_insn, 16, 19);
11962 }
11963 }
11964 break;
11965
11966 case 7:
11967 {
11968 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
11969 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
11970 {
11971 /* Permanently UNDEFINED */
11972 return -1;
11973 }
11974 else
11975 {
11976 /* BFC, BFI and UBFX */
11977 record_buf[arm_insn_r->reg_rec_count++]
11978 = bits (arm_insn_r->arm_insn, 12, 15);
11979 }
11980 }
11981 break;
11982
11983 default:
11984 return -1;
11985 }
11986
11987 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11988
11989 return 0;
11990 }
11991
11992 /* Handle ARM mode instructions with opcode 010. */
11993
11994 static int
11995 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
11996 {
11997 struct regcache *reg_cache = arm_insn_r->regcache;
11998
11999   uint32_t reg_base, reg_dest;
12000 uint32_t offset_12, tgt_mem_addr;
12001 uint32_t record_buf[8], record_buf_mem[8];
12002 unsigned char wback;
12003 ULONGEST u_regval;
12004
12005 /* Calculate wback. */
12006 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
12007 || (bit (arm_insn_r->arm_insn, 21) == 1);
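  /* I.e. the base register is written back for post-indexed addressing
     (P bit, bit 24, clear) and whenever the W bit (bit 21) is set.  */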
12008
12009 arm_insn_r->reg_rec_count = 0;
12010 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12011
12012 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12013 {
12014 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
12015 and LDRT. */
12016
12017 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12018 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
12019
12020       /* The LDR instruction is capable of branching.  If "MOV LR, PC"
12021 	 precedes an LDR instruction that loads into R15 (the PC), the pair
12022 	 emulates a branch-and-link instruction, and hence we need to save
12023 	 CPSR and PC as well.  */
12024 if (ARM_PC_REGNUM == reg_dest)
12025 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12026
12027 /* If wback is true, also save the base register, which is going to be
12028 written to. */
12029 if (wback)
12030 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12031 }
12032 else
12033 {
12034 /* STR (immediate), STRB (immediate), STRBT and STRT. */
12035
12036 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
12037 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12038
12039 /* Handle bit U. */
12040 if (bit (arm_insn_r->arm_insn, 23))
12041 {
12042 /* U == 1: Add the offset. */
12043 tgt_mem_addr = (uint32_t) u_regval + offset_12;
12044 }
12045 else
12046 {
12047 /* U == 0: subtract the offset. */
12048 tgt_mem_addr = (uint32_t) u_regval - offset_12;
12049 }
12050
12051 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
12052 bytes. */
12053 if (bit (arm_insn_r->arm_insn, 22))
12054 {
12055 /* STRB and STRBT: 1 byte. */
12056 record_buf_mem[0] = 1;
12057 }
12058 else
12059 {
12060 /* STR and STRT: 4 bytes. */
12061 record_buf_mem[0] = 4;
12062 }
12063
12064 /* Handle bit P. */
12065 if (bit (arm_insn_r->arm_insn, 24))
12066 record_buf_mem[1] = tgt_mem_addr;
12067 else
12068 record_buf_mem[1] = (uint32_t) u_regval;
12069
12070 arm_insn_r->mem_rec_count = 1;
12071
12072 /* If wback is true, also save the base register, which is going to be
12073 written to. */
12074 if (wback)
12075 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12076 }
12077
12078 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12079 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12080 return 0;
12081 }
12082
12083 /* Handling opcode 011 insns. */
12084
12085 static int
12086 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
12087 {
12088 struct regcache *reg_cache = arm_insn_r->regcache;
12089
12090 uint32_t shift_imm = 0;
12091 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
12092 uint32_t offset_12 = 0, tgt_mem_addr = 0;
12093 uint32_t record_buf[8], record_buf_mem[8];
12094
12095 LONGEST s_word;
12096 ULONGEST u_regval[2];
12097
12098 if (bit (arm_insn_r->arm_insn, 4))
12099 return arm_record_media (arm_insn_r);
12100
12101 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
12102 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
12103
12104 /* Handle enhanced store insns and LDRD DSP insn,
12105 order begins according to addressing modes for store insns
12106 STRH insn. */
12107
12108 /* LDR or STR? */
12109 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12110 {
12111 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12112       /* The LDR insn is capable of branching: if "MOV LR, PC" precedes
12113 	 an LDR insn that loads into R15 (the PC), the pair emulates a
12114 	 branch-and-link insn, and hence we need to save CPSR and PC as
12115 	 well.  */
12116 if (15 != reg_dest)
12117 {
12118 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12119 arm_insn_r->reg_rec_count = 1;
12120 }
12121 else
12122 {
12123 record_buf[0] = reg_dest;
12124 record_buf[1] = ARM_PS_REGNUM;
12125 arm_insn_r->reg_rec_count = 2;
12126 }
12127 }
12128 else
12129 {
12130 if (! bits (arm_insn_r->arm_insn, 4, 11))
12131 {
12132 /* Store insn, register offset and register pre-indexed,
12133 register post-indexed. */
12134 /* Get Rm. */
12135 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12136 /* Get Rn. */
12137 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12138 regcache_raw_read_unsigned (reg_cache, reg_src1
12139 , &u_regval[0]);
12140 regcache_raw_read_unsigned (reg_cache, reg_src2
12141 , &u_regval[1]);
12142 	      if (15 == reg_src2)
12143 		{
12144 		  /* If R15 (PC) was used as Rn, the value read is the current
12145 		     PC + 8.  Pre-indexed mode doesn't reach here; illegal insn.  */
12146 		  u_regval[1] = u_regval[1] + 8;
12147 		}
12148 /* Calculate target store address, Rn +/- Rm, register offset. */
12149 /* U == 1. */
12150 if (bit (arm_insn_r->arm_insn, 23))
12151 {
12152 tgt_mem_addr = u_regval[0] + u_regval[1];
12153 }
12154 else
12155 {
12156 tgt_mem_addr = u_regval[1] - u_regval[0];
12157 }
12158
12159 switch (arm_insn_r->opcode)
12160 {
12161 /* STR. */
12162 case 8:
12163 case 12:
12164 /* STR. */
12165 case 9:
12166 case 13:
12167 /* STRT. */
12168 case 1:
12169 case 5:
12170 /* STR. */
12171 case 0:
12172 case 4:
12173 record_buf_mem[0] = 4;
12174 break;
12175
12176 /* STRB. */
12177 case 10:
12178 case 14:
12179 /* STRB. */
12180 case 11:
12181 case 15:
12182 /* STRBT. */
12183 case 3:
12184 case 7:
12185 /* STRB. */
12186 case 2:
12187 case 6:
12188 record_buf_mem[0] = 1;
12189 break;
12190
12191 default:
12192 gdb_assert_not_reached ("no decoding pattern found");
12193 break;
12194 }
12195 record_buf_mem[1] = tgt_mem_addr;
12196 arm_insn_r->mem_rec_count = 1;
12197
12198 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12199 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12200 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12201 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12202 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12203 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12204 )
12205 {
12206 /* Rn is going to be changed in pre-indexed mode and
12207 post-indexed mode as well. */
12208 record_buf[0] = reg_src2;
12209 arm_insn_r->reg_rec_count = 1;
12210 }
12211 }
12212 else
12213 {
12214 /* Store insn, scaled register offset; scaled pre-indexed. */
12215 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
12216 /* Get Rm. */
12217 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12218 /* Get Rn. */
12219 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12220 /* Get shift_imm. */
12221 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
12222 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12223 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
12224 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12225 	      /* offset_12 currently holds the shift type (bits 5-6).  */
12226 switch (offset_12)
12227 {
12228 case 0:
12229 		  /* LSL: the shifted Rm becomes the offset.  */
12230 offset_12 = u_regval[0] << shift_imm;
12231 break;
12232
12233 case 1:
12234 		  offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
12235 break;
12236
12237 case 2:
12238 if (!shift_imm)
12239 {
12240 if (bit (u_regval[0], 31))
12241 {
12242 offset_12 = 0xFFFFFFFF;
12243 }
12244 else
12245 {
12246 offset_12 = 0;
12247 }
12248 }
12249 else
12250 {
12251 /* This is arithmetic shift. */
12252 offset_12 = s_word >> shift_imm;
12253 }
12254 break;
12255
12256 case 3:
12257 if (!shift_imm)
12258 {
12259 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12260 &u_regval[1]);
12261 /* Get C flag value and shift it by 31. */
12262 offset_12 = (((bit (u_regval[1], 29)) << 31) \
12263 | (u_regval[0]) >> 1);
12264 }
12265 else
12266 {
12267 		  /* Rotate right (ROR) by SHIFT_IMM bits.  */
12268 		  offset_12 = ((u_regval[0] >> shift_imm)
12269 			       | (u_regval[0] << (32 - shift_imm)));
12270 }
12271 break;
12272
12273 default:
12274 gdb_assert_not_reached ("no decoding pattern found");
12275 break;
12276 }
12277
12278 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12279 /* bit U set. */
12280 if (bit (arm_insn_r->arm_insn, 23))
12281 {
12282 tgt_mem_addr = u_regval[1] + offset_12;
12283 }
12284 else
12285 {
12286 tgt_mem_addr = u_regval[1] - offset_12;
12287 }
12288
12289 switch (arm_insn_r->opcode)
12290 {
12291 /* STR. */
12292 case 8:
12293 case 12:
12294 /* STR. */
12295 case 9:
12296 case 13:
12297 /* STRT. */
12298 case 1:
12299 case 5:
12300 /* STR. */
12301 case 0:
12302 case 4:
12303 record_buf_mem[0] = 4;
12304 break;
12305
12306 /* STRB. */
12307 case 10:
12308 case 14:
12309 /* STRB. */
12310 case 11:
12311 case 15:
12312 /* STRBT. */
12313 case 3:
12314 case 7:
12315 /* STRB. */
12316 case 2:
12317 case 6:
12318 record_buf_mem[0] = 1;
12319 break;
12320
12321 default:
12322 gdb_assert_not_reached ("no decoding pattern found");
12323 break;
12324 }
12325 record_buf_mem[1] = tgt_mem_addr;
12326 arm_insn_r->mem_rec_count = 1;
12327
12328 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12329 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12330 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12331 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12332 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12333 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12334 )
12335 {
12336 	      /* Rn is going to be changed in scaled register pre-indexed
12337 		 mode, and in scaled post-indexed mode.  */
12338 record_buf[0] = reg_src2;
12339 arm_insn_r->reg_rec_count = 1;
12340 }
12341 }
12342 }
12343
12344 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12345 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12346 return 0;
12347 }
12348
12349 /* Handle ARM mode instructions with opcode 100. */
12350
12351 static int
12352 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12353 {
12354 struct regcache *reg_cache = arm_insn_r->regcache;
12355 uint32_t register_count = 0, register_bits;
12356 uint32_t reg_base, addr_mode;
12357 uint32_t record_buf[24], record_buf_mem[48];
12358 uint32_t wback;
12359 ULONGEST u_regval;
12360
12361 /* Fetch the list of registers. */
12362 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12363 arm_insn_r->reg_rec_count = 0;
12364
12365 /* Fetch the base register that contains the address we are loading data
12366 to. */
12367 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12368
12369 /* Calculate wback. */
12370 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
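  /* For the block transfer insns (LDM/STM) only the W bit (bit 21)
     controls base register write-back.  */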
12371
12372 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12373 {
12374 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12375
12376 /* Find out which registers are going to be loaded from memory. */
12377 while (register_bits)
12378 {
12379 if (register_bits & 0x00000001)
12380 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12381 register_bits = register_bits >> 1;
12382 register_count++;
12383 }
12384
12385
12386 /* If wback is true, also save the base register, which is going to be
12387 written to. */
12388 if (wback)
12389 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12390
12391 /* Save the CPSR register. */
12392 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12393 }
12394 else
12395 {
12396 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12397
12398 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12399
12400 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12401
12402 /* Find out how many registers are going to be stored to memory. */
12403 while (register_bits)
12404 {
12405 if (register_bits & 0x00000001)
12406 register_count++;
12407 register_bits = register_bits >> 1;
12408 }
12409
12410 switch (addr_mode)
12411 {
12412 /* STMDA (STMED): Decrement after. */
12413 case 0:
12414 record_buf_mem[1] = (uint32_t) u_regval
12415 - register_count * ARM_INT_REGISTER_SIZE + 4;
12416 break;
12417 /* STM (STMIA, STMEA): Increment after. */
12418 case 1:
12419 record_buf_mem[1] = (uint32_t) u_regval;
12420 break;
12421 /* STMDB (STMFD): Decrement before. */
12422 case 2:
12423 record_buf_mem[1] = (uint32_t) u_regval
12424 - register_count * ARM_INT_REGISTER_SIZE;
12425 break;
12426 /* STMIB (STMFA): Increment before. */
12427 case 3:
12428 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12429 break;
12430 default:
12431 gdb_assert_not_reached ("no decoding pattern found");
12432 break;
12433 }
12434
12435 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12436 arm_insn_r->mem_rec_count = 1;
12437
12438 /* If wback is true, also save the base register, which is going to be
12439 written to. */
12440 if (wback)
12441 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12442 }
12443
12444 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12445 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12446 return 0;
12447 }
12448
12449 /* Handling opcode 101 insns. */
12450
12451 static int
12452 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12453 {
12454 uint32_t record_buf[8];
12455
12456 /* Handle B, BL, BLX(1) insns. */
12457 /* B simply branches so we do nothing here. */
12458 /* Note: BLX(1) doesn't fall here; instead it is decoded in the
12459 extension space. */
12460 if (bit (arm_insn_r->arm_insn, 24))
12461 {
12462 record_buf[0] = ARM_LR_REGNUM;
12463 arm_insn_r->reg_rec_count = 1;
12464 }
12465
12466 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12467
12468 return 0;
12469 }
12470
12471 static int
12472 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12473 {
12474 gdb_printf (gdb_stderr,
12475 _("Process record does not support instruction "
12476 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12477 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12478
12479 return -1;
12480 }
12481
12482 /* Record handler for vector data transfer instructions. */
12483
12484 static int
12485 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12486 {
12487 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12488 uint32_t record_buf[4];
12489
12490 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12491 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12492 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12493 bit_l = bit (arm_insn_r->arm_insn, 20);
12494 bit_c = bit (arm_insn_r->arm_insn, 8);
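/* Roughly: bit L (20) is set when data is transferred into an ARM core
   register, and bit C (8) is set when the transfer involves a scalar
   element of an Advanced SIMD register rather than a whole register or
   a system register. */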
12495
12496 /* Handle VMOV instruction. */
12497 if (bit_l && bit_c)
12498 {
12499 record_buf[0] = reg_t;
12500 arm_insn_r->reg_rec_count = 1;
12501 }
12502 else if (bit_l && !bit_c)
12503 {
12504 /* Handle VMOV instruction. */
12505 if (bits_a == 0x00)
12506 {
12507 record_buf[0] = reg_t;
12508 arm_insn_r->reg_rec_count = 1;
12509 }
12510 /* Handle VMRS instruction. */
12511 else if (bits_a == 0x07)
12512 {
12513 if (reg_t == 15)
12514 reg_t = ARM_PS_REGNUM;
12515
12516 record_buf[0] = reg_t;
12517 arm_insn_r->reg_rec_count = 1;
12518 }
12519 }
12520 else if (!bit_l && !bit_c)
12521 {
12522 /* Handle VMOV instruction. */
12523 if (bits_a == 0x00)
12524 {
12525 record_buf[0] = ARM_D0_REGNUM + reg_v;
12526
12527 arm_insn_r->reg_rec_count = 1;
12528 }
12529 /* Handle VMSR instruction. */
12530 else if (bits_a == 0x07)
12531 {
12532 record_buf[0] = ARM_FPSCR_REGNUM;
12533 arm_insn_r->reg_rec_count = 1;
12534 }
12535 }
12536 else if (!bit_l && bit_c)
12537 {
12538 /* Handle VMOV instruction. */
12539 if (!(bits_a & 0x04))
12540 {
12541 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12542 + ARM_D0_REGNUM;
12543 arm_insn_r->reg_rec_count = 1;
12544 }
12545 /* Handle VDUP instruction. */
12546 else
12547 {
12548 if (bit (arm_insn_r->arm_insn, 21))
12549 {
12550 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12551 record_buf[0] = reg_v + ARM_D0_REGNUM;
12552 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12553 arm_insn_r->reg_rec_count = 2;
12554 }
12555 else
12556 {
12557 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12558 record_buf[0] = reg_v + ARM_D0_REGNUM;
12559 arm_insn_r->reg_rec_count = 1;
12560 }
12561 }
12562 }
12563
12564 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12565 return 0;
12566 }
12567
12568 /* Record handler for extension register load/store instructions. */
12569
12570 static int
12571 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12572 {
12573 uint32_t opcode, single_reg;
12574 uint8_t op_vldm_vstm;
12575 uint32_t record_buf[8], record_buf_mem[128];
12576 ULONGEST u_regval = 0;
12577
12578 struct regcache *reg_cache = arm_insn_r->regcache;
12579
12580 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12581 single_reg = !bit (arm_insn_r->arm_insn, 8);
12582 op_vldm_vstm = opcode & 0x1b;
12583
12584 /* Handle VMOV instructions. */
12585 if ((opcode & 0x1e) == 0x04)
12586 {
12587 if (bit (arm_insn_r->arm_insn, 20)) /* To-ARM-registers direction (bit 20). */
12588 {
12589 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12590 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12591 arm_insn_r->reg_rec_count = 2;
12592 }
12593 else
12594 {
12595 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12596 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12597
12598 if (single_reg)
12599 {
12600 /* The first S register number m is REG_M:M (M is bit 5),
12601 the corresponding D register number is REG_M:M / 2, which
12602 is REG_M. */
12603 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12604 /* The second S register number is REG_M:M + 1, the
12605 corresponding D register number is (REG_M:M + 1) / 2.
12606 IOW, if bit M is 1, the first and second S registers
12607 are mapped to different D registers, otherwise, they are
12608 in the same D register. */
12609 if (bit_m)
12610 {
12611 record_buf[arm_insn_r->reg_rec_count++]
12612 = ARM_D0_REGNUM + reg_m + 1;
12613 }
12614 }
12615 else
12616 {
12617 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12618 arm_insn_r->reg_rec_count = 1;
12619 }
12620 }
12621 }
12622 /* Handle VSTM and VPUSH instructions. */
12623 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12624 || op_vldm_vstm == 0x12)
12625 {
12626 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12627 uint32_t memory_index = 0;
12628
12629 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12630 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12631 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12632 imm_off32 = imm_off8 << 2;
12633 memory_count = imm_off8;
12634
12635 if (bit (arm_insn_r->arm_insn, 23))
12636 start_address = u_regval;
12637 else
12638 start_address = u_regval - imm_off32;
12639
12640 if (bit (arm_insn_r->arm_insn, 21))
12641 {
12642 record_buf[0] = reg_rn;
12643 arm_insn_r->reg_rec_count = 1;
12644 }
12645
12646 while (memory_count > 0)
12647 {
12648 if (single_reg)
12649 {
12650 record_buf_mem[memory_index] = 4;
12651 record_buf_mem[memory_index + 1] = start_address;
12652 start_address = start_address + 4;
12653 memory_index = memory_index + 2;
12654 }
12655 else
12656 {
12657 record_buf_mem[memory_index] = 4;
12658 record_buf_mem[memory_index + 1] = start_address;
12659 record_buf_mem[memory_index + 2] = 4;
12660 record_buf_mem[memory_index + 3] = start_address + 4;
12661 start_address = start_address + 8;
12662 memory_index = memory_index + 4;
12663 }
12664 memory_count--;
12665 }
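/* Each memory record occupies two slots of RECORD_BUF_MEM (length,
   address), so the number of records is half the number of slots
   filled in above. */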
12666 arm_insn_r->mem_rec_count = (memory_index >> 1);
12667 }
12668 /* Handle VLDM instructions. */
12669 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12670 || op_vldm_vstm == 0x13)
12671 {
12672 uint32_t reg_count, reg_vd;
12673 uint32_t reg_index = 0;
12674 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12675
12676 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12677 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12678
12679 /* REG_VD is the first D register number. If the instruction
12680 loads memory to S registers (SINGLE_REG is TRUE), the register
12681 number is (REG_VD << 1 | bit D), so the corresponding D
12682 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12683 if (!single_reg)
12684 reg_vd = reg_vd | (bit_d << 4);
12685
12686 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12687 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12688
12689 /* If the instruction loads memory to D register, REG_COUNT should
12690 be divided by 2, according to the ARM Architecture Reference
12691 Manual. If the instruction loads memory to S register, divide by
12692 2 as well, because two S registers map to one D register. */
12693 reg_count = reg_count / 2;
12694 if (single_reg && bit_d)
12695 {
12696 /* Increase the register count if S register list starts from
12697 an odd number (bit d is one). */
12698 reg_count++;
12699 }
12700
12701 while (reg_count > 0)
12702 {
12703 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12704 reg_count--;
12705 }
12706 arm_insn_r->reg_rec_count = reg_index;
12707 }
12708 /* VSTR Vector store register. */
12709 else if ((opcode & 0x13) == 0x10)
12710 {
12711 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12712 uint32_t memory_index = 0;
12713
12714 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12715 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12716 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12717 imm_off32 = imm_off8 << 2;
12718
12719 if (bit (arm_insn_r->arm_insn, 23))
12720 start_address = u_regval + imm_off32;
12721 else
12722 start_address = u_regval - imm_off32;
12723
12724 if (single_reg)
12725 {
12726 record_buf_mem[memory_index] = 4;
12727 record_buf_mem[memory_index + 1] = start_address;
12728 arm_insn_r->mem_rec_count = 1;
12729 }
12730 else
12731 {
12732 record_buf_mem[memory_index] = 4;
12733 record_buf_mem[memory_index + 1] = start_address;
12734 record_buf_mem[memory_index + 2] = 4;
12735 record_buf_mem[memory_index + 3] = start_address + 4;
12736 arm_insn_r->mem_rec_count = 2;
12737 }
12738 }
12739 /* VLDR Vector load register. */
12740 else if ((opcode & 0x13) == 0x11)
12741 {
12742 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12743
12744 if (!single_reg)
12745 {
12746 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12747 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12748 }
12749 else
12750 {
12751 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12752 /* Record register D rather than pseudo register S. */
12753 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12754 }
12755 arm_insn_r->reg_rec_count = 1;
12756 }
12757
12758 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12759 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12760 return 0;
12761 }
12762
12763 /* Record handler for arm/thumb mode VFP data processing instructions. */
12764
12765 static int
12766 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12767 {
12768 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12769 uint32_t record_buf[4];
12770 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12771 enum insn_types curr_insn_type = INSN_INV;
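/* The INSN_Tx classes describe the destination that has to be
   recorded: INSN_T0 a pair of consecutive D registers (a quadword
   result), INSN_T1 a double-precision D register, INSN_T2 a
   single-precision destination, and INSN_T3 only the FPSCR (used for
   the compare instructions). */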
12772
12773 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12774 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12775 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12776 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12777 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12778 bit_d = bit (arm_insn_r->arm_insn, 22);
12779 /* Mask off the "D" bit. */
12780 opc1 = opc1 & ~0x04;
12781
12782 /* Handle VMLA, VMLS. */
12783 if (opc1 == 0x00)
12784 {
12785 if (bit (arm_insn_r->arm_insn, 10))
12786 {
12787 if (bit (arm_insn_r->arm_insn, 6))
12788 curr_insn_type = INSN_T0;
12789 else
12790 curr_insn_type = INSN_T1;
12791 }
12792 else
12793 {
12794 if (dp_op_sz)
12795 curr_insn_type = INSN_T1;
12796 else
12797 curr_insn_type = INSN_T2;
12798 }
12799 }
12800 /* Handle VNMLA, VNMLS, VNMUL. */
12801 else if (opc1 == 0x01)
12802 {
12803 if (dp_op_sz)
12804 curr_insn_type = INSN_T1;
12805 else
12806 curr_insn_type = INSN_T2;
12807 }
12808 /* Handle VMUL. */
12809 else if (opc1 == 0x02 && !(opc3 & 0x01))
12810 {
12811 if (bit (arm_insn_r->arm_insn, 10))
12812 {
12813 if (bit (arm_insn_r->arm_insn, 6))
12814 curr_insn_type = INSN_T0;
12815 else
12816 curr_insn_type = INSN_T1;
12817 }
12818 else
12819 {
12820 if (dp_op_sz)
12821 curr_insn_type = INSN_T1;
12822 else
12823 curr_insn_type = INSN_T2;
12824 }
12825 }
12826 /* Handle VADD, VSUB. */
12827 else if (opc1 == 0x03)
12828 {
12829 if (!bit (arm_insn_r->arm_insn, 9))
12830 {
12831 if (bit (arm_insn_r->arm_insn, 6))
12832 curr_insn_type = INSN_T0;
12833 else
12834 curr_insn_type = INSN_T1;
12835 }
12836 else
12837 {
12838 if (dp_op_sz)
12839 curr_insn_type = INSN_T1;
12840 else
12841 curr_insn_type = INSN_T2;
12842 }
12843 }
12844 /* Handle VDIV. */
12845 else if (opc1 == 0x08)
12846 {
12847 if (dp_op_sz)
12848 curr_insn_type = INSN_T1;
12849 else
12850 curr_insn_type = INSN_T2;
12851 }
12852 /* Handle all other vfp data processing instructions. */
12853 else if (opc1 == 0x0b)
12854 {
12855 /* Handle VMOV. */
12856 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12857 {
12858 if (bit (arm_insn_r->arm_insn, 4))
12859 {
12860 if (bit (arm_insn_r->arm_insn, 6))
12861 curr_insn_type = INSN_T0;
12862 else
12863 curr_insn_type = INSN_T1;
12864 }
12865 else
12866 {
12867 if (dp_op_sz)
12868 curr_insn_type = INSN_T1;
12869 else
12870 curr_insn_type = INSN_T2;
12871 }
12872 }
12873 /* Handle VNEG and VABS. */
12874 else if ((opc2 == 0x01 && opc3 == 0x01)
12875 || (opc2 == 0x00 && opc3 == 0x03))
12876 {
12877 if (!bit (arm_insn_r->arm_insn, 11))
12878 {
12879 if (bit (arm_insn_r->arm_insn, 6))
12880 curr_insn_type = INSN_T0;
12881 else
12882 curr_insn_type = INSN_T1;
12883 }
12884 else
12885 {
12886 if (dp_op_sz)
12887 curr_insn_type = INSN_T1;
12888 else
12889 curr_insn_type = INSN_T2;
12890 }
12891 }
12892 /* Handle VSQRT. */
12893 else if (opc2 == 0x01 && opc3 == 0x03)
12894 {
12895 if (dp_op_sz)
12896 curr_insn_type = INSN_T1;
12897 else
12898 curr_insn_type = INSN_T2;
12899 }
12900 /* Handle VCVT. */
12901 else if (opc2 == 0x07 && opc3 == 0x03)
12902 {
12903 if (!dp_op_sz)
12904 curr_insn_type = INSN_T1;
12905 else
12906 curr_insn_type = INSN_T2;
12907 }
12908 else if (opc3 & 0x01)
12909 {
12910 /* Handle VCVT. */
12911 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12912 {
12913 if (!bit (arm_insn_r->arm_insn, 18))
12914 curr_insn_type = INSN_T2;
12915 else
12916 {
12917 if (dp_op_sz)
12918 curr_insn_type = INSN_T1;
12919 else
12920 curr_insn_type = INSN_T2;
12921 }
12922 }
12923 /* Handle VCVT. */
12924 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12925 {
12926 if (dp_op_sz)
12927 curr_insn_type = INSN_T1;
12928 else
12929 curr_insn_type = INSN_T2;
12930 }
12931 /* Handle VCVTB, VCVTT. */
12932 else if ((opc2 & 0x0e) == 0x02)
12933 curr_insn_type = INSN_T2;
12934 /* Handle VCMP, VCMPE. */
12935 else if ((opc2 & 0x0e) == 0x04)
12936 curr_insn_type = INSN_T3;
12937 }
12938 }
12939
12940 switch (curr_insn_type)
12941 {
12942 case INSN_T0:
12943 reg_vd = reg_vd | (bit_d << 4);
12944 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12945 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12946 arm_insn_r->reg_rec_count = 2;
12947 break;
12948
12949 case INSN_T1:
12950 reg_vd = reg_vd | (bit_d << 4);
12951 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12952 arm_insn_r->reg_rec_count = 1;
12953 break;
12954
12955 case INSN_T2:
12956 reg_vd = (reg_vd << 1) | bit_d;
12957 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12958 arm_insn_r->reg_rec_count = 1;
12959 break;
12960
12961 case INSN_T3:
12962 record_buf[0] = ARM_FPSCR_REGNUM;
12963 arm_insn_r->reg_rec_count = 1;
12964 break;
12965
12966 default:
12967 gdb_assert_not_reached ("no decoding pattern found");
12968 break;
12969 }
12970
12971 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12972 return 0;
12973 }
12974
12975 /* Handling opcode 110 insns. */
12976
12977 static int
12978 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
12979 {
12980 uint32_t op1, op1_ebit, coproc;
12981
12982 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12983 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12984 op1_ebit = bit (arm_insn_r->arm_insn, 20);
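/* Coprocessor numbers 10 and 11 (0b101x) are reserved for the VFP and
   Advanced SIMD extensions, hence the (coproc & 0x0e) == 0x0a tests
   below. */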
12985
12986 if ((coproc & 0x0e) == 0x0a)
12987 {
12988 /* Handle extension register ld/st instructions. */
12989 if (!(op1 & 0x20))
12990 return arm_record_exreg_ld_st_insn (arm_insn_r);
12991
12992 /* 64-bit transfers between arm core and extension registers. */
12993 if ((op1 & 0x3e) == 0x04)
12994 return arm_record_exreg_ld_st_insn (arm_insn_r);
12995 }
12996 else
12997 {
12998 /* Handle coprocessor ld/st instructions. */
12999 if (!(op1 & 0x3a))
13000 {
13001 /* Store. */
13002 if (!op1_ebit)
13003 return arm_record_unsupported_insn (arm_insn_r);
13004 else
13005 /* Load. */
13006 return arm_record_unsupported_insn (arm_insn_r);
13007 }
13008
13009 /* Move to coprocessor from two arm core registers. */
13010 if (op1 == 0x4)
13011 return arm_record_unsupported_insn (arm_insn_r);
13012
13013 /* Move to two arm core registers from coprocessor. */
13014 if (op1 == 0x5)
13015 {
13016 uint32_t reg_t[2];
13017
13018 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
13019 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
13020 arm_insn_r->reg_rec_count = 2;
13021
13022 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
13023 return 0;
13024 }
13025 }
13026 return arm_record_unsupported_insn (arm_insn_r);
13027 }
13028
13029 /* Handling opcode 111 insns. */
13030
13031 static int
13032 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
13033 {
13034 uint32_t op, op1_ebit, coproc, bits_24_25;
13035 arm_gdbarch_tdep *tdep
13036 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch);
13037 struct regcache *reg_cache = arm_insn_r->regcache;
13038
13039 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
13040 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13041 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13042 op = bit (arm_insn_r->arm_insn, 4);
13043 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
13044
13045 /* Handle arm SWI/SVC system call instructions. */
13046 if (bits_24_25 == 0x3)
13047 {
13048 if (tdep->arm_syscall_record != NULL)
13049 {
13050 ULONGEST svc_operand, svc_number;
13051
13052 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
13053
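/* Under the old OABI the syscall number is encoded in the SVC
   immediate as 0x900000 + number; under EABI the immediate is zero
   and the number is passed in r7. */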
13054 if (svc_operand) /* OABI. */
13055 svc_number = svc_operand - 0x900000;
13056 else /* EABI. */
13057 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
13058
13059 return tdep->arm_syscall_record (reg_cache, svc_number);
13060 }
13061 else
13062 {
13063 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13064 return -1;
13065 }
13066 }
13067 else if (bits_24_25 == 0x02)
13068 {
13069 if (op)
13070 {
13071 if ((coproc & 0x0e) == 0x0a)
13072 {
13073 /* 8, 16, and 32-bit transfer */
13074 return arm_record_vdata_transfer_insn (arm_insn_r);
13075 }
13076 else
13077 {
13078 if (op1_ebit)
13079 {
13080 /* MRC, MRC2 */
13081 uint32_t record_buf[1];
13082
13083 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
13084 if (record_buf[0] == 15)
13085 record_buf[0] = ARM_PS_REGNUM;
13086
13087 arm_insn_r->reg_rec_count = 1;
13088 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
13089 record_buf);
13090 return 0;
13091 }
13092 else
13093 {
13094 /* MCR, MCR2 */
13095 return -1;
13096 }
13097 }
13098 }
13099 else
13100 {
13101 if ((coproc & 0x0e) == 0x0a)
13102 {
13103 /* VFP data-processing instructions. */
13104 return arm_record_vfp_data_proc_insn (arm_insn_r);
13105 }
13106 else
13107 {
13108 /* CDP, CDP2 */
13109 return -1;
13110 }
13111 }
13112 }
13113 else
13114 {
13115 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
13116
13117 if (op1 == 5)
13118 {
13119 if ((coproc & 0x0e) != 0x0a)
13120 {
13121 /* MRRC, MRRC2 */
13122 return -1;
13123 }
13124 }
13125 else if (op1 == 4 || op1 == 5)
13126 {
13127 if ((coproc & 0x0e) == 0x0a)
13128 {
13129 /* 64-bit transfers between ARM core and extension registers. */
13130 return -1;
13131 }
13132 else if (op1 == 4)
13133 {
13134 /* MCRR, MCRR2 */
13135 return -1;
13136 }
13137 }
13138 else if (op1 == 0 || op1 == 1)
13139 {
13140 /* UNDEFINED */
13141 return -1;
13142 }
13143 else
13144 {
13145 if ((coproc & 0x0e) == 0x0a)
13146 {
13147 /* Extension register load/store */
13148 }
13149 else
13150 {
13151 /* STC, STC2, LDC, LDC2 */
13152 }
13153 return -1;
13154 }
13155 }
13156
13157 return -1;
13158 }
13159
13160 /* Handling opcode 000 insns. */
13161
13162 static int
13163 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
13164 {
13165 uint32_t record_buf[8];
13166 uint32_t reg_src1 = 0;
13167
13168 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13169
13170 record_buf[0] = ARM_PS_REGNUM;
13171 record_buf[1] = reg_src1;
13172 thumb_insn_r->reg_rec_count = 2;
13173
13174 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13175
13176 return 0;
13177 }
13178
13179
13180 /* Handling opcode 001 insns. */
13181
13182 static int
13183 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
13184 {
13185 uint32_t record_buf[8];
13186 uint32_t reg_src1 = 0;
13187
13188 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13189
13190 record_buf[0] = ARM_PS_REGNUM;
13191 record_buf[1] = reg_src1;
13192 thumb_insn_r->reg_rec_count = 2;
13193
13194 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13195
13196 return 0;
13197 }
13198
13199 /* Handling opcode 010 insns. */
13200
13201 static int
13202 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
13203 {
13204 struct regcache *reg_cache = thumb_insn_r->regcache;
13205 uint32_t record_buf[8], record_buf_mem[8];
13206
13207 uint32_t reg_src1 = 0, reg_src2 = 0;
13208 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
13209
13210 ULONGEST u_regval[2] = {0};
13211
13212 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
13213
13214 if (bit (thumb_insn_r->arm_insn, 12))
13215 {
13216 /* Handle load/store register offset. */
13217 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
13218
13219 if (in_inclusive_range (opB, 4U, 7U))
13220 {
13221 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
13222 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13223 record_buf[0] = reg_src1;
13224 thumb_insn_r->reg_rec_count = 1;
13225 }
13226 else if (in_inclusive_range (opB, 0U, 2U))
13227 {
13228 /* STR(2), STRB(2), STRH(2). */
13229 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13230 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
13231 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
13232 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
13233 if (0 == opB)
13234 record_buf_mem[0] = 4; /* STR (2). */
13235 else if (2 == opB)
13236 record_buf_mem[0] = 1; /* STRB (2). */
13237 else if (1 == opB)
13238 record_buf_mem[0] = 2; /* STRH (2). */
13239 record_buf_mem[1] = u_regval[0] + u_regval[1];
13240 thumb_insn_r->mem_rec_count = 1;
13241 }
13242 }
13243 else if (bit (thumb_insn_r->arm_insn, 11))
13244 {
13245 /* Handle load from literal pool. */
13246 /* LDR(3). */
13247 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13248 record_buf[0] = reg_src1;
13249 thumb_insn_r->reg_rec_count = 1;
13250 }
13251 else if (opcode1)
13252 {
13253 /* Special data instructions and branch and exchange */
13254 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
13255 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
13256 if ((3 == opcode2) && (!opcode3))
13257 {
13258 /* Branch with exchange. */
13259 record_buf[0] = ARM_PS_REGNUM;
13260 thumb_insn_r->reg_rec_count = 1;
13261 }
13262 else
13263 {
13264 /* Format 8; special data processing insns. */
13265 record_buf[0] = ARM_PS_REGNUM;
13266 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13267 | bits (thumb_insn_r->arm_insn, 0, 2));
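/* Bit 7 is the H1 bit of the format 8 encoding; it extends the 3-bit
   Rd field so that the high registers r8-r15 can be named. */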
13268 thumb_insn_r->reg_rec_count = 2;
13269 }
13270 }
13271 else
13272 {
13273 /* Format 5; data processing insns. */
13274 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13275 if (bit (thumb_insn_r->arm_insn, 7))
13276 {
13277 reg_src1 = reg_src1 + 8;
13278 }
13279 record_buf[0] = ARM_PS_REGNUM;
13280 record_buf[1] = reg_src1;
13281 thumb_insn_r->reg_rec_count = 2;
13282 }
13283
13284 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13285 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13286 record_buf_mem);
13287
13288 return 0;
13289 }
13290
13291 /* Handling opcode 011 insns. */
13292
13293 static int
13294 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13295 {
13296 struct regcache *reg_cache = thumb_insn_r->regcache;
13297 uint32_t record_buf[8], record_buf_mem[8];
13298
13299 uint32_t reg_src1 = 0;
13300 uint32_t opcode = 0, immed_5 = 0;
13301
13302 ULONGEST u_regval = 0;
13303
13304 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13305
13306 if (opcode)
13307 {
13308 /* LDR(1). */
13309 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13310 record_buf[0] = reg_src1;
13311 thumb_insn_r->reg_rec_count = 1;
13312 }
13313 else
13314 {
13315 /* STR(1). */
13316 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13317 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13318 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13319 record_buf_mem[0] = 4;
13320 record_buf_mem[1] = u_regval + (immed_5 * 4);
13321 thumb_insn_r->mem_rec_count = 1;
13322 }
13323
13324 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13325 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13326 record_buf_mem);
13327
13328 return 0;
13329 }
13330
13331 /* Handling opcode 100 insns. */
13332
13333 static int
13334 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13335 {
13336 struct regcache *reg_cache = thumb_insn_r->regcache;
13337 uint32_t record_buf[8], record_buf_mem[8];
13338
13339 uint32_t reg_src1 = 0;
13340 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13341
13342 ULONGEST u_regval = 0;
13343
13344 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13345
13346 if (3 == opcode)
13347 {
13348 /* LDR(4). */
13349 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13350 record_buf[0] = reg_src1;
13351 thumb_insn_r->reg_rec_count = 1;
13352 }
13353 else if (1 == opcode)
13354 {
13355 /* LDRH(1). */
13356 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13357 record_buf[0] = reg_src1;
13358 thumb_insn_r->reg_rec_count = 1;
13359 }
13360 else if (2 == opcode)
13361 {
13362 /* STR(3). */
13363 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13364 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13365 record_buf_mem[0] = 4;
13366 record_buf_mem[1] = u_regval + (immed_8 * 4);
13367 thumb_insn_r->mem_rec_count = 1;
13368 }
13369 else if (0 == opcode)
13370 {
13371 /* STRH(1). */
13372 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13373 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13374 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13375 record_buf_mem[0] = 2;
13376 record_buf_mem[1] = u_regval + (immed_5 * 2);
13377 thumb_insn_r->mem_rec_count = 1;
13378 }
13379
13380 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13381 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13382 record_buf_mem);
13383
13384 return 0;
13385 }
13386
13387 /* Handling opcode 101 insns. */
13388
13389 static int
13390 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13391 {
13392 struct regcache *reg_cache = thumb_insn_r->regcache;
13393
13394 uint32_t opcode = 0;
13395 uint32_t register_bits = 0, register_count = 0;
13396 uint32_t index = 0, start_address = 0;
13397 uint32_t record_buf[24], record_buf_mem[48];
13398 uint32_t reg_src1;
13399
13400 ULONGEST u_regval = 0;
13401
13402 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13403
13404 if (opcode == 0 || opcode == 1)
13405 {
13406 /* ADR and ADD (SP plus immediate) */
13407
13408 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13409 record_buf[0] = reg_src1;
13410 thumb_insn_r->reg_rec_count = 1;
13411 }
13412 else
13413 {
13414 /* Miscellaneous 16-bit instructions */
13415 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13416
13417 switch (opcode2)
13418 {
13419 case 6:
13420 /* SETEND and CPS */
13421 break;
13422 case 0:
13423 /* ADD/SUB (SP plus immediate) */
13424 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13425 record_buf[0] = ARM_SP_REGNUM;
13426 thumb_insn_r->reg_rec_count = 1;
13427 break;
13428 case 1: /* fall through */
13429 case 3: /* fall through */
13430 case 9: /* fall through */
13431 case 11:
13432 /* CBNZ, CBZ */
13433 break;
13434 case 2:
13435 /* SXTH, SXTB, UXTH, UXTB */
13436 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13437 thumb_insn_r->reg_rec_count = 1;
13438 break;
13439 case 4: /* fall through */
13440 case 5:
13441 /* PUSH. */
13442 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13443 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13444 while (register_bits)
13445 {
13446 if (register_bits & 0x00000001)
13447 register_count++;
13448 register_bits = register_bits >> 1;
13449 }
13450 start_address = u_regval -
13451 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
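/* In the size computed above, bit 8 (the R bit of the Thumb PUSH
   encoding) accounts for LR being pushed along with the listed
   registers, adding one extra word to the block written below SP. */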
13452 thumb_insn_r->mem_rec_count = register_count;
13453 while (register_count)
13454 {
13455 record_buf_mem[(register_count * 2) - 1] = start_address;
13456 record_buf_mem[(register_count * 2) - 2] = 4;
13457 start_address = start_address + 4;
13458 register_count--;
13459 }
13460 record_buf[0] = ARM_SP_REGNUM;
13461 thumb_insn_r->reg_rec_count = 1;
13462 break;
13463 case 10:
13464 /* REV, REV16, REVSH */
13465 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13466 thumb_insn_r->reg_rec_count = 1;
13467 break;
13468 case 12: /* fall through */
13469 case 13:
13470 /* POP. */
13471 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13472 while (register_bits)
13473 {
13474 if (register_bits & 0x00000001)
13475 record_buf[index++] = register_count;
13476 register_bits = register_bits >> 1;
13477 register_count++;
13478 }
13479 record_buf[index++] = ARM_PS_REGNUM;
13480 record_buf[index++] = ARM_SP_REGNUM;
13481 thumb_insn_r->reg_rec_count = index;
13482 break;
13483 case 0xe:
13484 /* BKPT insn. */
13485 /* Handle the enhanced software breakpoint insn, BKPT. */
13486 /* On BKPT the CPSR is changed so that execution resumes in ARM state, in
13487 Abort mode, with normal interrupts disabled. */
13488 /* The PC is then set according to the vector table configuration. */
13489 /* If the user hits the breakpoint and then reverses execution, we must
13490 restore the previous CPSR and program counter. */
13491 record_buf[0] = ARM_PS_REGNUM;
13492 record_buf[1] = ARM_LR_REGNUM;
13493 thumb_insn_r->reg_rec_count = 2;
13494 /* We need to save SPSR value, which is not yet done. */
13495 gdb_printf (gdb_stderr,
13496 _("Process record does not support instruction "
13497 "0x%0x at address %s.\n"),
13498 thumb_insn_r->arm_insn,
13499 paddress (thumb_insn_r->gdbarch,
13500 thumb_insn_r->this_addr));
13501 return -1;
13502
13503 case 0xf:
13504 /* If-Then, and hints */
13505 break;
13506 default:
13507 return -1;
13508 };
13509 }
13510
13511 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13512 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13513 record_buf_mem);
13514
13515 return 0;
13516 }
13517
13518 /* Handling opcode 110 insns. */
13519
13520 static int
13521 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13522 {
13523 arm_gdbarch_tdep *tdep
13524 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch);
13525 struct regcache *reg_cache = thumb_insn_r->regcache;
13526
13527 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
13528 uint32_t reg_src1 = 0;
13529 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13530 uint32_t index = 0, start_address = 0;
13531 uint32_t record_buf[24], record_buf_mem[48];
13532
13533 ULONGEST u_regval = 0;
13534
13535 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13536 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13537
13538 if (1 == opcode2)
13539 {
13540
13541 /* LDMIA. */
13542 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13543 /* Get Rn. */
13544 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13545 while (register_bits)
13546 {
13547 if (register_bits & 0x00000001)
13548 record_buf[index++] = register_count;
13549 register_bits = register_bits >> 1;
13550 register_count++;
13551 }
13552 record_buf[index++] = reg_src1;
13553 thumb_insn_r->reg_rec_count = index;
13554 }
13555 else if (0 == opcode2)
13556 {
13557 /* Handle STMIA. */
13558 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13559 /* Get Rn. */
13560 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13561 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13562 while (register_bits)
13563 {
13564 if (register_bits & 0x00000001)
13565 register_count++;
13566 register_bits = register_bits >> 1;
13567 }
13568 start_address = u_regval;
13569 thumb_insn_r->mem_rec_count = register_count;
13570 while (register_count)
13571 {
13572 record_buf_mem[(register_count * 2) - 1] = start_address;
13573 record_buf_mem[(register_count * 2) - 2] = 4;
13574 start_address = start_address + 4;
13575 register_count--;
13576 }
13577 }
13578 else if (0x1F == opcode1)
13579 {
13580 /* Handle arm syscall insn. */
13581 if (tdep->arm_syscall_record != NULL)
13582 {
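/* As in the ARM case, the EABI convention passes the syscall
   number in r7. */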
13583 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13584 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13585 }
13586 else
13587 {
13588 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13589 return -1;
13590 }
13591 }
13592
13593 /* B(1), the conditional branch, is automatically taken care of in
13594 process_record, as the PC is saved there. */
13595
13596 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13597 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13598 record_buf_mem);
13599
13600 return ret;
13601 }
13602
13603 /* Handling opcode 111 insns. */
13604
13605 static int
13606 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13607 {
13608 uint32_t record_buf[8];
13609 uint32_t bits_h = 0;
13610
13611 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13612
13613 if (2 == bits_h || 3 == bits_h)
13614 {
13615 /* BL */
13616 record_buf[0] = ARM_LR_REGNUM;
13617 thumb_insn_r->reg_rec_count = 1;
13618 }
13619 else if (1 == bits_h)
13620 {
13621 /* BLX(1). */
13622 record_buf[0] = ARM_PS_REGNUM;
13623 record_buf[1] = ARM_LR_REGNUM;
13624 thumb_insn_r->reg_rec_count = 2;
13625 }
13626
13627 /* B(2) is automatically taken care of in process_record, as the PC
13628 is saved there. */
13629
13630 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13631
13632 return 0;
13633 }
13634
13635 /* Handler for thumb2 load/store multiple instructions. */
13636
13637 static int
13638 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13639 {
13640 struct regcache *reg_cache = thumb2_insn_r->regcache;
13641
13642 uint32_t reg_rn, op;
13643 uint32_t register_bits = 0, register_count = 0;
13644 uint32_t index = 0, start_address = 0;
13645 uint32_t record_buf[24], record_buf_mem[48];
13646
13647 ULONGEST u_regval = 0;
13648
13649 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13650 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13651
13652 if (0 == op || 3 == op)
13653 {
13654 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13655 {
13656 /* Handle RFE instruction. */
13657 record_buf[0] = ARM_PS_REGNUM;
13658 thumb2_insn_r->reg_rec_count = 1;
13659 }
13660 else
13661 {
13662 /* Handle SRS instruction after reading banked SP. */
13663 return arm_record_unsupported_insn (thumb2_insn_r);
13664 }
13665 }
13666 else if (1 == op || 2 == op)
13667 {
13668 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13669 {
13670 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13671 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13672 while (register_bits)
13673 {
13674 if (register_bits & 0x00000001)
13675 record_buf[index++] = register_count;
13676
13677 register_count++;
13678 register_bits = register_bits >> 1;
13679 }
13680 record_buf[index++] = reg_rn;
13681 record_buf[index++] = ARM_PS_REGNUM;
13682 thumb2_insn_r->reg_rec_count = index;
13683 }
13684 else
13685 {
13686 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13687 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13688 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13689 while (register_bits)
13690 {
13691 if (register_bits & 0x00000001)
13692 register_count++;
13693
13694 register_bits = register_bits >> 1;
13695 }
13696
13697 if (1 == op)
13698 {
13699 /* Start address calculation for STM/STMIA/STMEA. */
13700 start_address = u_regval;
13701 }
13702 else if (2 == op)
13703 {
13704 /* Start address calculation for STMDB/STMFD. */
13705 start_address = u_regval - register_count * 4;
13706 }
13707
13708 thumb2_insn_r->mem_rec_count = register_count;
13709 while (register_count)
13710 {
13711 record_buf_mem[register_count * 2 - 1] = start_address;
13712 record_buf_mem[register_count * 2 - 2] = 4;
13713 start_address = start_address + 4;
13714 register_count--;
13715 }
13716 record_buf[0] = reg_rn;
13717 record_buf[1] = ARM_PS_REGNUM;
13718 thumb2_insn_r->reg_rec_count = 2;
13719 }
13720 }
13721
13722 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13723 record_buf_mem);
13724 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13725 record_buf);
13726 return ARM_RECORD_SUCCESS;
13727 }
13728
13729 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13730 instructions. */
13731
13732 static int
13733 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13734 {
13735 struct regcache *reg_cache = thumb2_insn_r->regcache;
13736
13737 uint32_t reg_rd, reg_rn, offset_imm;
13738 uint32_t reg_dest1, reg_dest2;
13739 uint32_t address, offset_addr;
13740 uint32_t record_buf[8], record_buf_mem[8];
13741 uint32_t op1, op2, op3;
13742
13743 ULONGEST u_regval[2];
13744
13745 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13746 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13747 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13748
13749 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13750 {
13751 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13752 {
13753 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13754 record_buf[0] = reg_dest1;
13755 record_buf[1] = ARM_PS_REGNUM;
13756 thumb2_insn_r->reg_rec_count = 2;
13757 }
13758
13759 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13760 {
13761 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13762 record_buf[2] = reg_dest2;
13763 thumb2_insn_r->reg_rec_count = 3;
13764 }
13765 }
13766 else
13767 {
13768 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13769 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13770
13771 if (0 == op1 && 0 == op2)
13772 {
13773 /* Handle STREX. */
13774 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13775 address = u_regval[0] + (offset_imm * 4);
13776 record_buf_mem[0] = 4;
13777 record_buf_mem[1] = address;
13778 thumb2_insn_r->mem_rec_count = 1;
13779 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13780 record_buf[0] = reg_rd;
13781 thumb2_insn_r->reg_rec_count = 1;
13782 }
13783 else if (1 == op1 && 0 == op2)
13784 {
13785 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13786 record_buf[0] = reg_rd;
13787 thumb2_insn_r->reg_rec_count = 1;
13788 address = u_regval[0];
13789 record_buf_mem[1] = address;
13790
13791 if (4 == op3)
13792 {
13793 /* Handle STREXB. */
13794 record_buf_mem[0] = 1;
13795 thumb2_insn_r->mem_rec_count = 1;
13796 }
13797 else if (5 == op3)
13798 {
13799 /* Handle STREXH. */
13800 record_buf_mem[0] = 2;
13801 thumb2_insn_r->mem_rec_count = 1;
13802 }
13803 else if (7 == op3)
13804 {
13805 /* Handle STREXD. */
13806 address = u_regval[0];
13807 record_buf_mem[0] = 4;
13808 record_buf_mem[2] = 4;
13809 record_buf_mem[3] = address + 4;
13810 thumb2_insn_r->mem_rec_count = 2;
13811 }
13812 }
13813 else
13814 {
13815 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13816
13817 if (bit (thumb2_insn_r->arm_insn, 24))
13818 {
13819 if (bit (thumb2_insn_r->arm_insn, 23))
13820 offset_addr = u_regval[0] + (offset_imm * 4);
13821 else
13822 offset_addr = u_regval[0] - (offset_imm * 4);
13823
13824 address = offset_addr;
13825 }
13826 else
13827 address = u_regval[0];
13828
13829 record_buf_mem[0] = 4;
13830 record_buf_mem[1] = address;
13831 record_buf_mem[2] = 4;
13832 record_buf_mem[3] = address + 4;
13833 thumb2_insn_r->mem_rec_count = 2;
13834 record_buf[0] = reg_rn;
13835 thumb2_insn_r->reg_rec_count = 1;
13836 }
13837 }
13838
13839 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13840 record_buf);
13841 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13842 record_buf_mem);
13843 return ARM_RECORD_SUCCESS;
13844 }
13845
13846 /* Handler for thumb2 data processing (shift register and modified immediate)
13847 instructions. */
13848
13849 static int
13850 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13851 {
13852 uint32_t reg_rd, op;
13853 uint32_t record_buf[8];
13854
13855 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13856 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
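/* When Rd is PC (0xf) for opcodes 0 (AND), 4 (EOR), 8 (ADD) and 13
   (SUB) the instruction is really TST, TEQ, CMN or CMP, which only
   update the flags. */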
13857
13858 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13859 {
13860 record_buf[0] = ARM_PS_REGNUM;
13861 thumb2_insn_r->reg_rec_count = 1;
13862 }
13863 else
13864 {
13865 record_buf[0] = reg_rd;
13866 record_buf[1] = ARM_PS_REGNUM;
13867 thumb2_insn_r->reg_rec_count = 2;
13868 }
13869
13870 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13871 record_buf);
13872 return ARM_RECORD_SUCCESS;
13873 }
13874
13875 /* Generic handler for thumb2 instructions which affect the destination
13876 and PS registers. */
13877
13878 static int
13879 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13880 {
13881 uint32_t reg_rd;
13882 uint32_t record_buf[8];
13883
13884 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13885
13886 record_buf[0] = reg_rd;
13887 record_buf[1] = ARM_PS_REGNUM;
13888 thumb2_insn_r->reg_rec_count = 2;
13889
13890 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13891 record_buf);
13892 return ARM_RECORD_SUCCESS;
13893 }
13894
13895 /* Handler for thumb2 branch and miscellaneous control instructions. */
13896
13897 static int
13898 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
13899 {
13900 uint32_t op, op1, op2;
13901 uint32_t record_buf[8];
13902
13903 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13904 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13905 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13906
13907 /* Handle MSR insn. */
13908 if (!(op1 & 0x2) && 0x38 == op)
13909 {
13910 if (!(op2 & 0x3))
13911 {
13912 /* CPSR is going to be changed. */
13913 record_buf[0] = ARM_PS_REGNUM;
13914 thumb2_insn_r->reg_rec_count = 1;
13915 }
13916 else
13917 {
13918 arm_record_unsupported_insn(thumb2_insn_r);
13919 return -1;
13920 }
13921 }
13922 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13923 {
13924 /* BLX. */
13925 record_buf[0] = ARM_PS_REGNUM;
13926 record_buf[1] = ARM_LR_REGNUM;
13927 thumb2_insn_r->reg_rec_count = 2;
13928 }
13929
13930 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13931 record_buf);
13932 return ARM_RECORD_SUCCESS;
13933 }
13934
13935 /* Handler for thumb2 store single data item instructions. */
13936
13937 static int
13938 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
13939 {
13940 struct regcache *reg_cache = thumb2_insn_r->regcache;
13941
13942 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13943 uint32_t address, offset_addr;
13944 uint32_t record_buf[8], record_buf_mem[8];
13945 uint32_t op1, op2;
13946
13947 ULONGEST u_regval[2];
13948
13949 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13950 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13951 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13952 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13953
13954 if (bit (thumb2_insn_r->arm_insn, 23))
13955 {
13956 /* T2 encoding. */
13957 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13958 offset_addr = u_regval[0] + offset_imm;
13959 address = offset_addr;
13960 }
13961 else
13962 {
13963 /* T3 encoding. */
13964 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13965 {
13966 /* Handle STRB, STRH and STR (register). */
13967 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13968 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13969 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13970 offset_addr = u_regval[1] << shift_imm;
13971 address = u_regval[0] + offset_addr;
13972 }
13973 else
13974 {
13975 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13976 if (bit (thumb2_insn_r->arm_insn, 10))
13977 {
13978 if (bit (thumb2_insn_r->arm_insn, 9))
13979 offset_addr = u_regval[0] + offset_imm;
13980 else
13981 offset_addr = u_regval[0] - offset_imm;
13982
13983 address = offset_addr;
13984 }
13985 else
13986 address = u_regval[0];
13987 }
13988 }
13989
13990 switch (op1)
13991 {
13992 /* Store byte instructions. */
13993 case 4:
13994 case 0:
13995 record_buf_mem[0] = 1;
13996 break;
13997 /* Store half word instructions. */
13998 case 1:
13999 case 5:
14000 record_buf_mem[0] = 2;
14001 break;
14002 /* Store word instructions. */
14003 case 2:
14004 case 6:
14005 record_buf_mem[0] = 4;
14006 break;
14007
14008 default:
14009 gdb_assert_not_reached ("no decoding pattern found");
14010 break;
14011 }
14012
14013 record_buf_mem[1] = address;
14014 thumb2_insn_r->mem_rec_count = 1;
14015 record_buf[0] = reg_rn;
14016 thumb2_insn_r->reg_rec_count = 1;
14017
14018 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14019 record_buf);
14020 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14021 record_buf_mem);
14022 return ARM_RECORD_SUCCESS;
14023 }
14024
14025 /* Handler for thumb2 load memory hints instructions. */
14026
14027 static int
14028 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
14029 {
14030 uint32_t record_buf[8];
14031 uint32_t reg_rt, reg_rn;
14032
14033 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
14034 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14035
14036 if (ARM_PC_REGNUM != reg_rt)
14037 {
14038 record_buf[0] = reg_rt;
14039 record_buf[1] = reg_rn;
14040 record_buf[2] = ARM_PS_REGNUM;
14041 thumb2_insn_r->reg_rec_count = 3;
14042
14043 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14044 record_buf);
14045 return ARM_RECORD_SUCCESS;
14046 }
14047
14048 return ARM_RECORD_FAILURE;
14049 }
14050
14051 /* Handler for thumb2 load word instructions. */
14052
14053 static int
14054 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
14055 {
14056 uint32_t record_buf[8];
14057
14058 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
14059 record_buf[1] = ARM_PS_REGNUM;
14060 thumb2_insn_r->reg_rec_count = 2;
14061
14062 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14063 record_buf);
14064 return ARM_RECORD_SUCCESS;
14065 }
14066
14067 /* Handler for thumb2 long multiply, long multiply accumulate, and
14068 divide instructions. */
14069
14070 static int
14071 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
14072 {
14073 uint32_t opcode1 = 0, opcode2 = 0;
14074 uint32_t record_buf[8];
14075
14076 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
14077 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
14078
14079 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
14080 {
14081 /* Handle the long multiply / long multiply accumulate family:
14082 SMULL, UMULL, SMLAL, UMLAL and related variants. */
14083 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14084 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14085 record_buf[2] = ARM_PS_REGNUM;
14086 thumb2_insn_r->reg_rec_count = 3;
14087 }
14088 else if (1 == opcode1 || 3 == opcode1)
14089 {
14090 /* Handle SDIV and UDIV. */
14091 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14092 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14093 record_buf[2] = ARM_PS_REGNUM;
14094 thumb2_insn_r->reg_rec_count = 3;
14095 }
14096 else
14097 return ARM_RECORD_FAILURE;
14098
14099 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14100 record_buf);
14101 return ARM_RECORD_SUCCESS;
14102 }
14103
14104 /* Record handler for thumb32 coprocessor instructions. */
14105
14106 static int
14107 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
14108 {
14109 if (bit (thumb2_insn_r->arm_insn, 25))
14110 return arm_record_coproc_data_proc (thumb2_insn_r);
14111 else
14112 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
14113 }
14114
14115 /* Record handler for Advanced SIMD structure load/store instructions. */
14116
14117 static int
14118 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
14119 {
14120 struct regcache *reg_cache = thumb2_insn_r->regcache;
14121 uint32_t l_bit, a_bit, b_bits;
14122 uint32_t record_buf[128], record_buf_mem[128];
14123 uint32_t reg_rn, reg_vd, address, f_elem;
14124 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
14125 uint8_t f_ebytes;
14126
14127 l_bit = bit (thumb2_insn_r->arm_insn, 21);
14128 a_bit = bit (thumb2_insn_r->arm_insn, 23);
14129 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
14130 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14131 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
14132 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
14133 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
14134 f_elem = 8 / f_ebytes;
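/* Bits 6-7 give the element size as 1, 2, 4 or 8 bytes; F_ELEM is then
   the number of such elements held in one 64-bit D register. */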
14135
14136 if (!l_bit)
14137 {
14138 ULONGEST u_regval = 0;
14139 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
14140 address = u_regval;
14141
14142 if (!a_bit)
14143 {
14144 /* Handle VST1. */
14145 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14146 {
14147 if (b_bits == 0x07)
14148 bf_regs = 1;
14149 else if (b_bits == 0x0a)
14150 bf_regs = 2;
14151 else if (b_bits == 0x06)
14152 bf_regs = 3;
14153 else if (b_bits == 0x02)
14154 bf_regs = 4;
14155 else
14156 bf_regs = 0;
14157
14158 for (index_r = 0; index_r < bf_regs; index_r++)
14159 {
14160 for (index_e = 0; index_e < f_elem; index_e++)
14161 {
14162 record_buf_mem[index_m++] = f_ebytes;
14163 record_buf_mem[index_m++] = address;
14164 address = address + f_ebytes;
14165 thumb2_insn_r->mem_rec_count += 1;
14166 }
14167 }
14168 }
14169 /* Handle VST2. */
14170 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14171 {
14172 if (b_bits == 0x09 || b_bits == 0x08)
14173 bf_regs = 1;
14174 else if (b_bits == 0x03)
14175 bf_regs = 2;
14176 else
14177 bf_regs = 0;
14178
14179 for (index_r = 0; index_r < bf_regs; index_r++)
14180 for (index_e = 0; index_e < f_elem; index_e++)
14181 {
14182 for (loop_t = 0; loop_t < 2; loop_t++)
14183 {
14184 record_buf_mem[index_m++] = f_ebytes;
14185 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14186 thumb2_insn_r->mem_rec_count += 1;
14187 }
14188 address = address + (2 * f_ebytes);
14189 }
14190 }
14191 /* Handle VST3. */
14192 else if ((b_bits & 0x0e) == 0x04)
14193 {
14194 for (index_e = 0; index_e < f_elem; index_e++)
14195 {
14196 for (loop_t = 0; loop_t < 3; loop_t++)
14197 {
14198 record_buf_mem[index_m++] = f_ebytes;
14199 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14200 thumb2_insn_r->mem_rec_count += 1;
14201 }
14202 address = address + (3 * f_ebytes);
14203 }
14204 }
14205 /* Handle VST4. */
14206 else if (!(b_bits & 0x0e))
14207 {
14208 for (index_e = 0; index_e < f_elem; index_e++)
14209 {
14210 for (loop_t = 0; loop_t < 4; loop_t++)
14211 {
14212 record_buf_mem[index_m++] = f_ebytes;
14213 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14214 thumb2_insn_r->mem_rec_count += 1;
14215 }
14216 address = address + (4 * f_ebytes);
14217 }
14218 }
14219 }
14220 else
14221 {
14222 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
14223
14224 if (bft_size == 0x00)
14225 f_ebytes = 1;
14226 else if (bft_size == 0x01)
14227 f_ebytes = 2;
14228 else if (bft_size == 0x02)
14229 f_ebytes = 4;
14230 else
14231 f_ebytes = 0;
14232
14233 /* Handle VST1. */
14234 if (!(b_bits & 0x0b) || b_bits == 0x08)
14235 thumb2_insn_r->mem_rec_count = 1;
14236 /* Handle VST2. */
14237 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
14238 thumb2_insn_r->mem_rec_count = 2;
14239 /* Handle VST3. */
14240 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
14241 thumb2_insn_r->mem_rec_count = 3;
14242 /* Handle VST4. */
14243 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
14244 thumb2_insn_r->mem_rec_count = 4;
14245
14246 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
14247 {
14248 record_buf_mem[index_m * 2] = f_ebytes;
14249 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
14250 }
14251 }
14252 }
14253 else
14254 {
14255 if (!a_bit)
14256 {
14257 /* Handle VLD1. */
14258 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14259 thumb2_insn_r->reg_rec_count = 1;
14260 /* Handle VLD2. */
14261 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14262 thumb2_insn_r->reg_rec_count = 2;
14263 /* Handle VLD3. */
14264 else if ((b_bits & 0x0e) == 0x04)
14265 thumb2_insn_r->reg_rec_count = 3;
14266 /* Handle VLD4. */
14267 else if (!(b_bits & 0x0e))
14268 thumb2_insn_r->reg_rec_count = 4;
14269 }
14270 else
14271 {
14272 /* Handle VLD1. */
14273 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14274 thumb2_insn_r->reg_rec_count = 1;
14275 /* Handle VLD2. */
14276 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14277 thumb2_insn_r->reg_rec_count = 2;
14278 /* Handle VLD3. */
14279 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14280 thumb2_insn_r->reg_rec_count = 3;
14281 /* Handle VLD4. */
14282 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14283 thumb2_insn_r->reg_rec_count = 4;
14284
14285 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14286 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14287 }
14288 }
14289
14290 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14291 {
14292 record_buf[index_r] = reg_rn;
14293 thumb2_insn_r->reg_rec_count += 1;
14294 }
14295
14296 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14297 record_buf);
14298 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14299 record_buf_mem);
14300 return 0;
14301 }
14302
14303 /* Decodes thumb2 instruction type and invokes its record handler. */
14304
14305 static unsigned int
14306 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14307 {
14308 uint32_t op, op1, op2;
14309
14310 op = bit (thumb2_insn_r->arm_insn, 15);
14311 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14312 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
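/* OP1 (bits 27-28 of the combined 32-bit instruction) selects one of
   the three groups of 32-bit Thumb encodings; OP and OP2 refine the
   selection within a group. */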
14313
14314 if (op1 == 0x01)
14315 {
14316 if (!(op2 & 0x64 ))
14317 {
14318 /* Load/store multiple instruction. */
14319 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14320 }
14321 else if ((op2 & 0x64) == 0x4)
14322 {
14323 /* Load/store (dual/exclusive) and table branch instruction. */
14324 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14325 }
14326 else if ((op2 & 0x60) == 0x20)
14327 {
14328 /* Data-processing (shifted register). */
14329 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14330 }
14331 else if (op2 & 0x40)
14332 {
14333 /* Co-processor instructions. */
14334 return thumb2_record_coproc_insn (thumb2_insn_r);
14335 }
14336 }
14337 else if (op1 == 0x02)
14338 {
14339 if (op)
14340 {
14341 /* Branches and miscellaneous control instructions. */
14342 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14343 }
14344 else if (op2 & 0x20)
14345 {
14346 /* Data-processing (plain binary immediate) instruction. */
14347 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14348 }
14349 else
14350 {
14351 /* Data-processing (modified immediate). */
14352 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14353 }
14354 }
14355 else if (op1 == 0x03)
14356 {
14357 if (!(op2 & 0x71 ))
14358 {
14359 /* Store single data item. */
14360 return thumb2_record_str_single_data (thumb2_insn_r);
14361 }
14362 else if (!((op2 & 0x71) ^ 0x10))
14363 {
14364 /* Advanced SIMD or structure load/store instructions. */
14365 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14366 }
14367 else if (!((op2 & 0x67) ^ 0x01))
14368 {
14369 /* Load byte, memory hints instruction. */
14370 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14371 }
14372 else if (!((op2 & 0x67) ^ 0x03))
14373 {
14374 /* Load halfword, memory hints instruction. */
14375 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14376 }
14377 else if (!((op2 & 0x67) ^ 0x05))
14378 {
14379 /* Load word instruction. */
14380 return thumb2_record_ld_word (thumb2_insn_r);
14381 }
14382 else if (!((op2 & 0x70) ^ 0x20))
14383 {
14384 /* Data-processing (register) instruction. */
14385 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14386 }
14387 else if (!((op2 & 0x78) ^ 0x30))
14388 {
14389 /* Multiply, multiply accumulate, abs diff instruction. */
14390 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14391 }
14392 else if (!((op2 & 0x78) ^ 0x38))
14393 {
14394 /* Long multiply, long multiply accumulate, and divide. */
14395 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14396 }
14397 else if (op2 & 0x40)
14398 {
14399 /* Co-processor instructions. */
14400 return thumb2_record_coproc_insn (thumb2_insn_r);
14401 }
14402 }
14403
14404 return -1;
14405 }
14406
14407 namespace {
14408 /* Abstract instruction reader. */
14409
14410 class abstract_instruction_reader
14411 {
14412 public:
14413 /* Read one instruction of size LEN from address MEMADDR, using
14414 BYTE_ORDER endianness. */
14415
14416 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len,
14417 enum bfd_endian byte_order) = 0;
14418 };
14419
14420 /* Instruction reader from real target. */
14421
14422 class instruction_reader : public abstract_instruction_reader
14423 {
14424 public:
14425 ULONGEST read (CORE_ADDR memaddr, const size_t len,
14426 enum bfd_endian byte_order) override
14427 {
14428 return read_code_unsigned_integer (memaddr, len, byte_order);
14429 }
14430 };
14431
14432 } // namespace
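
/* The abstract reader above exists so that the decode logic can be driven
   either from target memory (instruction_reader) or, for the unit tests
   further below, from a canned array of instructions, without duplicating
   the decoding code.  */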
14433
14434 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14435
14436 /* Decode an ARM/Thumb instruction depending on its condition codes and
14437    opcodes, and dispatch it to the matching record handler.  */
14438
14439 static int
14440 decode_insn (abstract_instruction_reader &reader,
14441 arm_insn_decode_record *arm_record,
14442 record_type_t record_type, uint32_t insn_size)
14443 {
14444
14445   /* Bits 25, 26 and 27 (counting from bit 0) decode the type of an ARM
14446      instruction.  */
14447 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14448 {
14449 arm_record_data_proc_misc_ld_str, /* 000. */
14450 arm_record_data_proc_imm, /* 001. */
14451 arm_record_ld_st_imm_offset, /* 010. */
14452 arm_record_ld_st_reg_offset, /* 011. */
14453 arm_record_ld_st_multiple, /* 100. */
14454 arm_record_b_bl, /* 101. */
14455 arm_record_asimd_vfp_coproc, /* 110. */
14456 arm_record_coproc_data_proc /* 111. */
14457 };
14458
14459   /* Bits 13, 14 and 15 (counting from bit 0) decode the type of a Thumb
14460      instruction.  */
14461   static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14462   {
14463 thumb_record_shift_add_sub, /* 000. */
14464 thumb_record_add_sub_cmp_mov, /* 001. */
14465 thumb_record_ld_st_reg_offset, /* 010. */
14466 thumb_record_ld_st_imm_offset, /* 011. */
14467 thumb_record_ld_st_stack, /* 100. */
14468 thumb_record_misc, /* 101. */
14469 thumb_record_ldm_stm_swi, /* 110. */
14470 thumb_record_branch /* 111. */
14471 };
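
  /* For instance (using the encodings from the selftests later in this
     file): the ARM insn 0xe1a05000 ("mov r5, r0") has bits 25-27 equal to
     0b000 and so is dispatched to arm_record_data_proc_misc_ld_str, while
     the 16-bit Thumb insn 0x58cd ("ldr r5, [r1, r3]") has bits 13-15 equal
     to 0b010 and so is dispatched to thumb_record_ld_st_reg_offset.  */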
14472
14473   int ret = 0;	/* Return value: negative on failure, 0 on success.  */
14474 uint32_t insn_id = 0;
14475 enum bfd_endian code_endian
14476 = gdbarch_byte_order_for_code (arm_record->gdbarch);
14477 arm_record->arm_insn
14478 = reader.read (arm_record->this_addr, insn_size, code_endian);
14479
14480 if (ARM_RECORD == record_type)
14481 {
14482 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14483 insn_id = bits (arm_record->arm_insn, 25, 27);
14484
14485 if (arm_record->cond == 0xf)
14486 ret = arm_record_extension_space (arm_record);
14487 else
14488 {
14489 	  /* The insn is not in the extension space, so decode it with
14490 	     the ordinary ARM handlers.  */
14491 ret = arm_handle_insn[insn_id] (arm_record);
14492 }
14493 if (ret != ARM_RECORD_SUCCESS)
14494 {
14495 arm_record_unsupported_insn (arm_record);
14496 ret = -1;
14497 }
14498 }
14499 else if (THUMB_RECORD == record_type)
14500 {
14501       /* Thumb has no condition codes, so set an invalid value.  */
14502 arm_record->cond = -1;
14503 insn_id = bits (arm_record->arm_insn, 13, 15);
14504 ret = thumb_handle_insn[insn_id] (arm_record);
14505 if (ret != ARM_RECORD_SUCCESS)
14506 {
14507 arm_record_unsupported_insn (arm_record);
14508 ret = -1;
14509 }
14510 }
14511 else if (THUMB2_RECORD == record_type)
14512 {
14513       /* Thumb has no condition codes, so set an invalid value.  */
14514 arm_record->cond = -1;
14515
14516       /* Swap the first halfword of the 32-bit Thumb instruction with the second.  */
14517 arm_record->arm_insn
14518 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
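      /* After this swap the first halfword of the instruction occupies bits
	 31..16, which is the bit numbering thumb2_record_decode_insn_handler
	 above expects (e.g. the selftest value 0x7f70ee1d becomes
	 0xee1d7f70).  */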
14519
14520 ret = thumb2_record_decode_insn_handler (arm_record);
14521
14522 if (ret != ARM_RECORD_SUCCESS)
14523 {
14524 arm_record_unsupported_insn (arm_record);
14525 ret = -1;
14526 }
14527 }
14528 else
14529 {
14530       /* Any other record type is a bug.  */
14531 gdb_assert_not_reached ("not a valid instruction, could not decode");
14532 }
14533
14534 return ret;
14535 }
14536
14537 #if GDB_SELF_TEST
14538 namespace selftests {
14539
14540 /* Instruction reader class for selftests.
14541
14542 For 16-bit Thumb instructions, an array of uint16_t should be used.
14543
14544 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array
14545 of uint32_t should be used. */
14546
14547 template<typename T>
14548 class instruction_reader_selftest : public abstract_instruction_reader
14549 {
14550 public:
14551 template<size_t SIZE>
14552 instruction_reader_selftest (const T (&insns)[SIZE])
14553 : m_insns (insns), m_insns_size (SIZE)
14554 {}
14555
14556 ULONGEST read (CORE_ADDR memaddr, const size_t length,
14557 enum bfd_endian byte_order) override
14558 {
14559 SELF_CHECK (length == sizeof (T));
14560 SELF_CHECK (memaddr % sizeof (T) == 0);
14561 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size);
14562
14563 return m_insns[memaddr / sizeof (T)];
14564 }
14565
14566 private:
14567 const T *m_insns;
14568 const size_t m_insns_size;
14569 };
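
/* Note that the reader above treats MEMADDR as a byte offset into the
   canned array, so a test advances this_addr by sizeof (T) between
   instructions (see the "this_addr += 2" in the 16-bit Thumb test
   below).  */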
14570
14571 static void
14572 arm_record_test (void)
14573 {
14574 struct gdbarch_info info;
14575 info.bfd_arch_info = bfd_scan_arch ("arm");
14576
14577 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14578
14579 SELF_CHECK (gdbarch != NULL);
14580
14581 /* 16-bit Thumb instructions. */
14582 {
14583 arm_insn_decode_record arm_record;
14584
14585 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14586 arm_record.gdbarch = gdbarch;
14587
14588 /* Use the endian-free representation of the instructions here. The test
14589 will handle endianness conversions. */
14590 static const uint16_t insns[] = {
14591 /* db b2 uxtb r3, r3 */
14592 0xb2db,
14593 /* cd 58 ldr r5, [r1, r3] */
14594 0x58cd,
14595 };
14596
14597 instruction_reader_selftest<uint16_t> reader (insns);
14598 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14599 THUMB_INSN_SIZE_BYTES);
14600
14601 SELF_CHECK (ret == 0);
14602 SELF_CHECK (arm_record.mem_rec_count == 0);
14603 SELF_CHECK (arm_record.reg_rec_count == 1);
14604 SELF_CHECK (arm_record.arm_regs[0] == 3);
14605
14606 arm_record.this_addr += 2;
14607 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14608 THUMB_INSN_SIZE_BYTES);
14609
14610 SELF_CHECK (ret == 0);
14611 SELF_CHECK (arm_record.mem_rec_count == 0);
14612 SELF_CHECK (arm_record.reg_rec_count == 1);
14613 SELF_CHECK (arm_record.arm_regs[0] == 5);
14614 }
14615
14616 /* 32-bit Thumb-2 instructions. */
14617 {
14618 arm_insn_decode_record arm_record;
14619
14620 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14621 arm_record.gdbarch = gdbarch;
14622
14623 /* Use the endian-free representation of the instruction here. The test
14624 will handle endianness conversions. */
14625 static const uint32_t insns[] = {
14626 /* mrc 15, 0, r7, cr13, cr0, {3} */
14627 0x7f70ee1d,
14628 };
14629
14630 instruction_reader_selftest<uint32_t> reader (insns);
14631 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14632 THUMB2_INSN_SIZE_BYTES);
14633
14634 SELF_CHECK (ret == 0);
14635 SELF_CHECK (arm_record.mem_rec_count == 0);
14636 SELF_CHECK (arm_record.reg_rec_count == 1);
14637 SELF_CHECK (arm_record.arm_regs[0] == 7);
14638 }
14639
14640 /* 32-bit instructions. */
14641 {
14642 arm_insn_decode_record arm_record;
14643
14644 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14645 arm_record.gdbarch = gdbarch;
14646
14647 /* Use the endian-free representation of the instruction here. The test
14648 will handle endianness conversions. */
14649 static const uint32_t insns[] = {
14650 /* mov r5, r0 */
14651 0xe1a05000,
14652 };
14653
14654 instruction_reader_selftest<uint32_t> reader (insns);
14655 int ret = decode_insn (reader, &arm_record, ARM_RECORD,
14656 ARM_INSN_SIZE_BYTES);
14657
14658 SELF_CHECK (ret == 0);
14659 }
14660 }
14661
14662 /* Instruction reader from manually cooked instruction sequences. */
14663
14664 class test_arm_instruction_reader : public arm_instruction_reader
14665 {
14666 public:
14667 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14668 : m_insns (insns)
14669 {}
14670
14671 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14672 {
14673 SELF_CHECK (memaddr % 4 == 0);
14674 SELF_CHECK (memaddr / 4 < m_insns.size ());
14675
14676 return m_insns[memaddr / 4];
14677 }
14678
14679 private:
14680 const gdb::array_view<const uint32_t> m_insns;
14681 };
14682
14683 static void
14684 arm_analyze_prologue_test ()
14685 {
14686 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14687 {
14688 struct gdbarch_info info;
14689 info.byte_order = endianness;
14690 info.byte_order_for_code = endianness;
14691 info.bfd_arch_info = bfd_scan_arch ("arm");
14692
14693 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14694
14695 SELF_CHECK (gdbarch != NULL);
14696
14697 /* The "sub" instruction contains an immediate value rotate count of 0,
14698 which resulted in a 32-bit shift of a 32-bit value, caught by
14699 UBSan. */
14700 const uint32_t insns[] = {
14701 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14702 0xe1a05000, /* mov r5, r0 */
14703 0xe5903020, /* ldr r3, [r0, #32] */
14704 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14705 };
14706
14707 test_arm_instruction_reader mem_reader (insns);
14708 arm_prologue_cache cache;
14709 arm_cache_init (&cache, gdbarch);
14710
14711 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14712 }
14713 }
14714
14715 } // namespace selftests
14716 #endif /* GDB_SELF_TEST */
14717
14718 /* Cleans up local record registers and memory allocations. */
14719
14720 static void
14721 deallocate_reg_mem (arm_insn_decode_record *record)
14722 {
14723 xfree (record->arm_regs);
14724 xfree (record->arm_mems);
14725 }
14726
14727
14728 /* Parse the current instruction, and record the values of the registers
14729    and memory that the current instruction will change in
14730    "record_arch_list".  Return -1 if something goes wrong.  */
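
/* Note: this is the entry point that "record full" uses for ARM; it is
   installed as the gdbarch process_record hook elsewhere in this file.  */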
14731
14732 int
14733 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14734 CORE_ADDR insn_addr)
14735 {
14736
14737 uint32_t no_of_rec = 0;
14738   int ret = 0;	/* Return value: -1 on record failure, 0 on success.  */
14739 ULONGEST t_bit = 0, insn_id = 0;
14740
14741 ULONGEST u_regval = 0;
14742
14743 arm_insn_decode_record arm_record;
14744
14745 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14746 arm_record.regcache = regcache;
14747 arm_record.this_addr = insn_addr;
14748 arm_record.gdbarch = gdbarch;
14749
14750
14751 if (record_debug > 1)
14752 {
14753 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14754 "addr = %s\n",
14755 paddress (gdbarch, arm_record.this_addr));
14756 }
14757
14758 instruction_reader reader;
14759 enum bfd_endian code_endian
14760 = gdbarch_byte_order_for_code (arm_record.gdbarch);
14761 arm_record.arm_insn
14762 = reader.read (arm_record.this_addr, 2, code_endian);
14763
14764   /* Check whether the insn is a Thumb or an ARM one, based on the T bit.  */
14765
14766 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14767 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14768
14769
14770 if (!(u_regval & t_bit))
14771 {
14772 /* We are decoding arm insn. */
14773 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14774 }
14775 else
14776 {
14777 insn_id = bits (arm_record.arm_insn, 11, 15);
14778       /* Is it a 32-bit Thumb-2 insn (top five bits 0b11101, 0b11110 or 0b11111)?  */
14779 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14780 {
14781 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14782 THUMB2_INSN_SIZE_BYTES);
14783 }
14784 else
14785 {
14786 /* We are decoding thumb insn. */
14787 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14788 THUMB_INSN_SIZE_BYTES);
14789 }
14790 }
14791
14792 if (0 == ret)
14793 {
14794 /* Record registers. */
14795 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14796 if (arm_record.arm_regs)
14797 {
14798 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14799 {
14800 if (record_full_arch_list_add_reg
14801 	      (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
14802 ret = -1;
14803 }
14804 }
14805 /* Record memories. */
14806 if (arm_record.arm_mems)
14807 {
14808 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14809 {
14810 if (record_full_arch_list_add_mem
14811 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14812 arm_record.arm_mems[no_of_rec].len))
14813 ret = -1;
14814 }
14815 }
14816
14817 if (record_full_arch_list_add_end ())
14818 ret = -1;
14819 }
14820
14821
14822 deallocate_reg_mem (&arm_record);
14823
14824 return ret;
14825 }
14826
14827 /* See arm-tdep.h. */
14828
14829 const target_desc *
14830 arm_read_description (arm_fp_type fp_type, bool tls)
14831 {
14832 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14833
14834 if (tdesc == nullptr)
14835 {
14836 tdesc = arm_create_target_description (fp_type, tls);
14837 tdesc_arm_list[fp_type][tls] = tdesc;
14838 }
14839
14840 return tdesc;
14841 }
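
/* As a usage sketch (assuming the arm_fp_type enumerators declared in
   arch/arm.h), a caller would do something like:

     const target_desc *tdesc = arm_read_description (ARM_FP_TYPE_VFPV2,
						       false);

   The descriptions are created lazily and cached in tdesc_arm_list, so
   repeated calls with the same arguments return the same object.  */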
14842
14843 /* See arm-tdep.h. */
14844
14845 const target_desc *
14846 arm_read_mprofile_description (arm_m_profile_type m_type)
14847 {
14848 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14849
14850 if (tdesc == nullptr)
14851 {
14852 tdesc = arm_create_mprofile_target_description (m_type);
14853 tdesc_arm_mprofile_list[m_type] = tdesc;
14854 }
14855
14856 return tdesc;
14857 }