gdb: remove BLOCK_FUNCTION macro
[binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2.h"
42 #include "dwarf2/frame.h"
43 #include "gdbtypes.h"
44 #include "prologue-value.h"
45 #include "remote.h"
46 #include "target-descriptions.h"
47 #include "user-regs.h"
48 #include "observable.h"
49 #include "count-one-bits.h"
50
51 #include "arch/arm.h"
52 #include "arch/arm-get-next-pcs.h"
53 #include "arm-tdep.h"
54 #include "gdb/sim-arm.h"
55
56 #include "elf-bfd.h"
57 #include "coff/internal.h"
58 #include "elf/arm.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "producer.h"
65
66 #if GDB_SELF_TEST
67 #include "gdbsupport/selftest.h"
68 #endif
69
70 static bool arm_debug;
71
72 /* Print an "arm" debug statement. */
73
74 #define arm_debug_printf(fmt, ...) \
75 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
76
77 /* Macros for setting and testing a bit in a minimal symbol that marks
78 it as Thumb function. The MSB of the minimal symbol's "info" field
79 is used for this purpose.
80
81 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
82 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
83
84 #define MSYMBOL_SET_SPECIAL(msym) \
85 (msym)->set_target_flag_1 (true)
86
87 #define MSYMBOL_IS_SPECIAL(msym) \
88 (msym)->target_flag_1 ()
89
90 struct arm_mapping_symbol
91 {
92 CORE_ADDR value;
93 char type;
94
95 bool operator< (const arm_mapping_symbol &other) const
96 { return this->value < other.value; }
97 };
98
99 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
100
101 struct arm_per_bfd
102 {
103 explicit arm_per_bfd (size_t num_sections)
104 : section_maps (new arm_mapping_symbol_vec[num_sections]),
105 section_maps_sorted (new bool[num_sections] ())
106 {}
107
108 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
109
110 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111
112 The format is an array of vectors of arm_mapping_symbols, there is one
113 vector for each section of the objfile (the array is index by BFD section
114 index).
115
116 For each section, the vector of arm_mapping_symbol is sorted by
117 symbol value (address). */
118 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
119
120 /* For each corresponding element of section_maps above, is this vector
121 sorted. */
122 std::unique_ptr<bool[]> section_maps_sorted;
123 };
124
125 /* Per-bfd data used for mapping symbols. */
126 static bfd_key<arm_per_bfd> arm_bfd_data_key;
127
128 /* The list of available "set arm ..." and "show arm ..." commands. */
129 static struct cmd_list_element *setarmcmdlist = NULL;
130 static struct cmd_list_element *showarmcmdlist = NULL;
131
132 /* The type of floating-point to use. Keep this in sync with enum
133 arm_float_model, and the help string in _initialize_arm_tdep. */
134 static const char *const fp_model_strings[] =
135 {
136 "auto",
137 "softfpa",
138 "fpa",
139 "softvfp",
140 "vfp",
141 NULL
142 };
143
144 /* A variable that can be configured by the user. */
145 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
146 static const char *current_fp_model = "auto";
147
148 /* The ABI to use. Keep this in sync with arm_abi_kind. */
149 static const char *const arm_abi_strings[] =
150 {
151 "auto",
152 "APCS",
153 "AAPCS",
154 NULL
155 };
156
157 /* A variable that can be configured by the user. */
158 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
159 static const char *arm_abi_string = "auto";
160
161 /* The execution mode to assume. */
162 static const char *const arm_mode_strings[] =
163 {
164 "auto",
165 "arm",
166 "thumb",
167 NULL
168 };
169
170 static const char *arm_fallback_mode_string = "auto";
171 static const char *arm_force_mode_string = "auto";
172
173 /* The standard register names, and all the valid aliases for them. Note
174 that `fp', `sp' and `pc' are not added in this alias list, because they
175 have been added as builtin user registers in
176 std-regs.c:_initialize_frame_reg. */
177 static const struct
178 {
179 const char *name;
180 int regnum;
181 } arm_register_aliases[] = {
182 /* Basic register numbers. */
183 { "r0", 0 },
184 { "r1", 1 },
185 { "r2", 2 },
186 { "r3", 3 },
187 { "r4", 4 },
188 { "r5", 5 },
189 { "r6", 6 },
190 { "r7", 7 },
191 { "r8", 8 },
192 { "r9", 9 },
193 { "r10", 10 },
194 { "r11", 11 },
195 { "r12", 12 },
196 { "r13", 13 },
197 { "r14", 14 },
198 { "r15", 15 },
199 /* Synonyms (argument and variable registers). */
200 { "a1", 0 },
201 { "a2", 1 },
202 { "a3", 2 },
203 { "a4", 3 },
204 { "v1", 4 },
205 { "v2", 5 },
206 { "v3", 6 },
207 { "v4", 7 },
208 { "v5", 8 },
209 { "v6", 9 },
210 { "v7", 10 },
211 { "v8", 11 },
212 /* Other platform-specific names for r9. */
213 { "sb", 9 },
214 { "tr", 9 },
215 /* Special names. */
216 { "ip", 12 },
217 { "lr", 14 },
218 /* Names used by GCC (not listed in the ARM EABI). */
219 { "sl", 10 },
220 /* A special name from the older ATPCS. */
221 { "wr", 7 },
222 };
223
224 static const char *const arm_register_names[] =
225 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
226 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
227 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
228 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
229 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
230 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
231 "fps", "cpsr" }; /* 24 25 */
232
233 /* Holds the current set of options to be passed to the disassembler. */
234 static char *arm_disassembler_options;
235
236 /* Valid register name styles. */
237 static const char **valid_disassembly_styles;
238
239 /* Disassembly style to use. Default to "std" register names. */
240 static const char *disassembly_style;
241
242 /* All possible arm target descriptors. */
243 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
244 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
245
246 /* This is used to keep the bfd arch_info in sync with the disassembly
247 style. */
248 static void set_disassembly_style_sfunc (const char *, int,
249 struct cmd_list_element *);
250 static void show_disassembly_style_sfunc (struct ui_file *, int,
251 struct cmd_list_element *,
252 const char *);
253
254 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
255 readable_regcache *regcache,
256 int regnum, gdb_byte *buf);
257 static void arm_neon_quad_write (struct gdbarch *gdbarch,
258 struct regcache *regcache,
259 int regnum, const gdb_byte *buf);
260
261 static CORE_ADDR
262 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
263
264
265 /* get_next_pcs operations. */
266 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
267 arm_get_next_pcs_read_memory_unsigned_integer,
268 arm_get_next_pcs_syscall_next_pc,
269 arm_get_next_pcs_addr_bits_remove,
270 arm_get_next_pcs_is_thumb,
271 NULL,
272 };
273
274 struct arm_prologue_cache
275 {
276 /* The stack pointer at the time this frame was created; i.e. the
277 caller's stack pointer when this function was called. It is used
278 to identify this frame. */
279 CORE_ADDR sp;
280
281 /* Additional stack pointers used by M-profile with Security extension. */
282 /* Use msp_s / psp_s to hold the values of msp / psp when there is
283 no Security extension. */
284 CORE_ADDR msp_s;
285 CORE_ADDR msp_ns;
286 CORE_ADDR psp_s;
287 CORE_ADDR psp_ns;
288
289 /* Active stack pointer. */
290 int active_sp_regnum;
291
292 /* The frame base for this frame is just prev_sp - frame size.
293 FRAMESIZE is the distance from the frame pointer to the
294 initial stack pointer. */
295
296 int framesize;
297
298 /* The register used to hold the frame pointer for this frame. */
299 int framereg;
300
301 /* True if the return address is signed, false otherwise. */
302 gdb::optional<bool> ra_signed_state;
303
304 /* Saved register offsets. */
305 trad_frame_saved_reg *saved_regs;
306
307 arm_prologue_cache() = default;
308 };
309
310 /* Initialize stack pointers, and flag the active one. */
311
312 static inline void
313 arm_cache_init_sp (int regnum, CORE_ADDR* member,
314 struct arm_prologue_cache *cache,
315 struct frame_info *frame)
316 {
317 CORE_ADDR val = get_frame_register_unsigned (frame, regnum);
318 if (val == cache->sp)
319 cache->active_sp_regnum = regnum;
320
321 *member = val;
322 }
323
324 /* Initialize CACHE fields for which zero is not adequate (CACHE is
325 expected to have been ZALLOC'ed before calling this function). */
326
327 static void
328 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
329 {
330 cache->active_sp_regnum = ARM_SP_REGNUM;
331
332 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
333 }
334
335 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
336
337 static void
338 arm_cache_init (struct arm_prologue_cache *cache, struct frame_info *frame)
339 {
340 struct gdbarch *gdbarch = get_frame_arch (frame);
341 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
342
343 arm_cache_init (cache, gdbarch);
344
345 if (tdep->have_sec_ext)
346 {
347 arm_cache_init_sp (tdep->m_profile_msp_s_regnum, &cache->msp_s, cache, frame);
348 arm_cache_init_sp (tdep->m_profile_psp_s_regnum, &cache->psp_s, cache, frame);
349 arm_cache_init_sp (tdep->m_profile_msp_ns_regnum, &cache->msp_ns, cache, frame);
350 arm_cache_init_sp (tdep->m_profile_psp_ns_regnum, &cache->psp_ns, cache, frame);
351
352 /* Use MSP_S as default stack pointer. */
353 if (cache->active_sp_regnum == ARM_SP_REGNUM)
354 cache->active_sp_regnum = tdep->m_profile_msp_s_regnum;
355 }
356 else if (tdep->is_m)
357 {
358 arm_cache_init_sp (tdep->m_profile_msp_regnum, &cache->msp_s, cache, frame);
359 arm_cache_init_sp (tdep->m_profile_psp_regnum, &cache->psp_s, cache, frame);
360 }
361 else
362 arm_cache_init_sp (ARM_SP_REGNUM, &cache->msp_s, cache, frame);
363 }
364
365 /* Return the requested stack pointer value (in REGNUM), taking into
366 account whether we have a Security extension or an M-profile
367 CPU. */
368
369 static CORE_ADDR
370 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
371 arm_gdbarch_tdep *tdep, int regnum)
372 {
373 if (regnum == ARM_SP_REGNUM)
374 return cache->sp;
375
376 if (tdep->have_sec_ext)
377 {
378 if (regnum == tdep->m_profile_msp_s_regnum)
379 return cache->msp_s;
380 if (regnum == tdep->m_profile_msp_ns_regnum)
381 return cache->msp_ns;
382 if (regnum == tdep->m_profile_psp_s_regnum)
383 return cache->psp_s;
384 if (regnum == tdep->m_profile_psp_ns_regnum)
385 return cache->psp_ns;
386 }
387 else if (tdep->is_m)
388 {
389 if (regnum == tdep->m_profile_msp_regnum)
390 return cache->msp_s;
391 if (regnum == tdep->m_profile_psp_regnum)
392 return cache->psp_s;
393 }
394
395 gdb_assert_not_reached ("Invalid SP selection");
396 }
397
398 /* Return the previous stack address, depending on which SP register
399 is active. */
400
401 static CORE_ADDR
402 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
403 {
404 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
405 return val;
406 }
407
408 /* Set the active stack pointer to VAL. */
409
410 static void
411 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
412 arm_gdbarch_tdep *tdep, CORE_ADDR val)
413 {
414 if (cache->active_sp_regnum == ARM_SP_REGNUM)
415 {
416 cache->sp = val;
417 return;
418 }
419
420 if (tdep->have_sec_ext)
421 {
422 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
423 cache->msp_s = val;
424 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
425 cache->msp_ns = val;
426 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
427 cache->psp_s = val;
428 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
429 cache->psp_ns = val;
430
431 return;
432 }
433 else if (tdep->is_m)
434 {
435 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
436 cache->msp_s = val;
437 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
438 cache->psp_s = val;
439
440 return;
441 }
442
443 gdb_assert_not_reached ("Invalid SP selection");
444 }
445
446 /* Return true if REGNUM is one of the stack pointers. */
447
448 static bool
449 arm_cache_is_sp_register (struct arm_prologue_cache *cache,
450 arm_gdbarch_tdep *tdep, int regnum)
451 {
452 if ((regnum == ARM_SP_REGNUM)
453 || (regnum == tdep->m_profile_msp_regnum)
454 || (regnum == tdep->m_profile_msp_s_regnum)
455 || (regnum == tdep->m_profile_msp_ns_regnum)
456 || (regnum == tdep->m_profile_psp_regnum)
457 || (regnum == tdep->m_profile_psp_s_regnum)
458 || (regnum == tdep->m_profile_psp_ns_regnum))
459 return true;
460 else
461 return false;
462 }
463
464 /* Set the active stack pointer to SP_REGNUM. */
465
466 static void
467 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
468 arm_gdbarch_tdep *tdep, int sp_regnum)
469 {
470 gdb_assert (sp_regnum != ARM_SP_REGNUM);
471 gdb_assert (arm_cache_is_sp_register (cache, tdep, sp_regnum));
472
473 if (tdep->have_sec_ext)
474 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
475 && sp_regnum != tdep->m_profile_psp_regnum);
476
477 cache->active_sp_regnum = sp_regnum;
478 }
479
480 namespace {
481
482 /* Abstract class to read ARM instructions from memory. */
483
484 class arm_instruction_reader
485 {
486 public:
487 /* Read a 4 bytes instruction from memory using the BYTE_ORDER endianness. */
488 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
489 };
490
491 /* Read instructions from target memory. */
492
493 class target_arm_instruction_reader : public arm_instruction_reader
494 {
495 public:
496 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
497 {
498 return read_code_unsigned_integer (memaddr, 4, byte_order);
499 }
500 };
501
502 } /* namespace */
503
504 static CORE_ADDR arm_analyze_prologue
505 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
506 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
507
508 /* Architecture version for displaced stepping. This effects the behaviour of
509 certain instructions, and really should not be hard-wired. */
510
511 #define DISPLACED_STEPPING_ARCH_VERSION 5
512
513 /* See arm-tdep.h. */
514
515 bool arm_apcs_32 = true;
516 bool arm_unwind_secure_frames = true;
517
518 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
519
520 int
521 arm_psr_thumb_bit (struct gdbarch *gdbarch)
522 {
523 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
524
525 if (tdep->is_m)
526 return XPSR_T;
527 else
528 return CPSR_T;
529 }
530
531 /* Determine if the processor is currently executing in Thumb mode. */
532
533 int
534 arm_is_thumb (struct regcache *regcache)
535 {
536 ULONGEST cpsr;
537 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
538
539 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
540
541 return (cpsr & t_bit) != 0;
542 }
543
544 /* Determine if FRAME is executing in Thumb mode. */
545
546 int
547 arm_frame_is_thumb (struct frame_info *frame)
548 {
549 CORE_ADDR cpsr;
550 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
551
552 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
553 directly (from a signal frame or dummy frame) or by interpreting
554 the saved LR (from a prologue or DWARF frame). So consult it and
555 trust the unwinders. */
556 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
557
558 return (cpsr & t_bit) != 0;
559 }
560
561 /* Search for the mapping symbol covering MEMADDR. If one is found,
562 return its type. Otherwise, return 0. If START is non-NULL,
563 set *START to the location of the mapping symbol. */
564
565 static char
566 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
567 {
568 struct obj_section *sec;
569
570 /* If there are mapping symbols, consult them. */
571 sec = find_pc_section (memaddr);
572 if (sec != NULL)
573 {
574 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
575 if (data != NULL)
576 {
577 unsigned int section_idx = sec->the_bfd_section->index;
578 arm_mapping_symbol_vec &map
579 = data->section_maps[section_idx];
580
581 /* Sort the vector on first use. */
582 if (!data->section_maps_sorted[section_idx])
583 {
584 std::sort (map.begin (), map.end ());
585 data->section_maps_sorted[section_idx] = true;
586 }
587
588 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
589 arm_mapping_symbol_vec::const_iterator it
590 = std::lower_bound (map.begin (), map.end (), map_key);
591
592 /* std::lower_bound finds the earliest ordered insertion
593 point. If the symbol at this position starts at this exact
594 address, we use that; otherwise, the preceding
595 mapping symbol covers this address. */
596 if (it < map.end ())
597 {
598 if (it->value == map_key.value)
599 {
600 if (start)
601 *start = it->value + sec->addr ();
602 return it->type;
603 }
604 }
605
606 if (it > map.begin ())
607 {
608 arm_mapping_symbol_vec::const_iterator prev_it
609 = it - 1;
610
611 if (start)
612 *start = prev_it->value + sec->addr ();
613 return prev_it->type;
614 }
615 }
616 }
617
618 return 0;
619 }
620
621 /* Determine if the program counter specified in MEMADDR is in a Thumb
622 function. This function should be called for addresses unrelated to
623 any executing frame; otherwise, prefer arm_frame_is_thumb. */
624
625 int
626 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
627 {
628 struct bound_minimal_symbol sym;
629 char type;
630 arm_displaced_step_copy_insn_closure *dsc = nullptr;
631 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
632
633 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
634 dsc = ((arm_displaced_step_copy_insn_closure * )
635 gdbarch_displaced_step_copy_insn_closure_by_addr
636 (gdbarch, current_inferior (), memaddr));
637
638 /* If checking the mode of displaced instruction in copy area, the mode
639 should be determined by instruction on the original address. */
640 if (dsc)
641 {
642 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
643 (unsigned long) dsc->insn_addr,
644 (unsigned long) memaddr);
645 memaddr = dsc->insn_addr;
646 }
647
648 /* If bit 0 of the address is set, assume this is a Thumb address. */
649 if (IS_THUMB_ADDR (memaddr))
650 return 1;
651
652 /* If the user wants to override the symbol table, let him. */
653 if (strcmp (arm_force_mode_string, "arm") == 0)
654 return 0;
655 if (strcmp (arm_force_mode_string, "thumb") == 0)
656 return 1;
657
658 /* ARM v6-M and v7-M are always in Thumb mode. */
659 if (tdep->is_m)
660 return 1;
661
662 /* If there are mapping symbols, consult them. */
663 type = arm_find_mapping_symbol (memaddr, NULL);
664 if (type)
665 return type == 't';
666
667 /* Thumb functions have a "special" bit set in minimal symbols. */
668 sym = lookup_minimal_symbol_by_pc (memaddr);
669 if (sym.minsym)
670 return (MSYMBOL_IS_SPECIAL (sym.minsym));
671
672 /* If the user wants to override the fallback mode, let them. */
673 if (strcmp (arm_fallback_mode_string, "arm") == 0)
674 return 0;
675 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
676 return 1;
677
678 /* If we couldn't find any symbol, but we're talking to a running
679 target, then trust the current value of $cpsr. This lets
680 "display/i $pc" always show the correct mode (though if there is
681 a symbol table we will not reach here, so it still may not be
682 displayed in the mode it will be executed). */
683 if (target_has_registers ())
684 return arm_frame_is_thumb (get_current_frame ());
685
686 /* Otherwise we're out of luck; we assume ARM. */
687 return 0;
688 }
689
690 /* Determine if the address specified equals any of these magic return
691 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
692 architectures.
693
694 From ARMv6-M Reference Manual B1.5.8
695 Table B1-5 Exception return behavior
696
697 EXC_RETURN Return To Return Stack
698 0xFFFFFFF1 Handler mode Main
699 0xFFFFFFF9 Thread mode Main
700 0xFFFFFFFD Thread mode Process
701
702 From ARMv7-M Reference Manual B1.5.8
703 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
704
705 EXC_RETURN Return To Return Stack
706 0xFFFFFFF1 Handler mode Main
707 0xFFFFFFF9 Thread mode Main
708 0xFFFFFFFD Thread mode Process
709
710 Table B1-9 EXC_RETURN definition of exception return behavior, with
711 FP
712
713 EXC_RETURN Return To Return Stack Frame Type
714 0xFFFFFFE1 Handler mode Main Extended
715 0xFFFFFFE9 Thread mode Main Extended
716 0xFFFFFFED Thread mode Process Extended
717 0xFFFFFFF1 Handler mode Main Basic
718 0xFFFFFFF9 Thread mode Main Basic
719 0xFFFFFFFD Thread mode Process Basic
720
721 For more details see "B1.5.8 Exception return behavior"
722 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
723
724 In the ARMv8-M Architecture Technical Reference also adds
725 for implementations without the Security Extension:
726
727 EXC_RETURN Condition
728 0xFFFFFFB0 Return to Handler mode.
729 0xFFFFFFB8 Return to Thread mode using the main stack.
730 0xFFFFFFBC Return to Thread mode using the process stack. */
731
732 static int
733 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
734 {
735 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
736 if (tdep->have_sec_ext)
737 {
738 switch ((addr & 0xff000000))
739 {
740 case 0xff000000: /* EXC_RETURN pattern. */
741 case 0xfe000000: /* FNC_RETURN pattern. */
742 return 1;
743 default:
744 return 0;
745 }
746 }
747 else
748 {
749 switch (addr)
750 {
751 /* Values from ARMv8-M Architecture Technical Reference. */
752 case 0xffffffb0:
753 case 0xffffffb8:
754 case 0xffffffbc:
755 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
756 the exception return behavior. */
757 case 0xffffffe1:
758 case 0xffffffe9:
759 case 0xffffffed:
760 case 0xfffffff1:
761 case 0xfffffff9:
762 case 0xfffffffd:
763 /* Address is magic. */
764 return 1;
765
766 default:
767 /* Address is not magic. */
768 return 0;
769 }
770 }
771 }
772
773 /* Remove useless bits from addresses in a running program. */
774 static CORE_ADDR
775 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
776 {
777 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
778
779 /* On M-profile devices, do not strip the low bit from EXC_RETURN
780 (the magic exception return address). */
781 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
782 return val;
783
784 if (arm_apcs_32)
785 return UNMAKE_THUMB_ADDR (val);
786 else
787 return (val & 0x03fffffc);
788 }
789
790 /* Return 1 if PC is the start of a compiler helper function which
791 can be safely ignored during prologue skipping. IS_THUMB is true
792 if the function is known to be a Thumb function due to the way it
793 is being called. */
794 static int
795 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
796 {
797 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
798 struct bound_minimal_symbol msym;
799
800 msym = lookup_minimal_symbol_by_pc (pc);
801 if (msym.minsym != NULL
802 && msym.value_address () == pc
803 && msym.minsym->linkage_name () != NULL)
804 {
805 const char *name = msym.minsym->linkage_name ();
806
807 /* The GNU linker's Thumb call stub to foo is named
808 __foo_from_thumb. */
809 if (strstr (name, "_from_thumb") != NULL)
810 name += 2;
811
812 /* On soft-float targets, __truncdfsf2 is called to convert promoted
813 arguments to their argument types in non-prototyped
814 functions. */
815 if (startswith (name, "__truncdfsf2"))
816 return 1;
817 if (startswith (name, "__aeabi_d2f"))
818 return 1;
819
820 /* Internal functions related to thread-local storage. */
821 if (startswith (name, "__tls_get_addr"))
822 return 1;
823 if (startswith (name, "__aeabi_read_tp"))
824 return 1;
825 }
826 else
827 {
828 /* If we run against a stripped glibc, we may be unable to identify
829 special functions by name. Check for one important case,
830 __aeabi_read_tp, by comparing the *code* against the default
831 implementation (this is hand-written ARM assembler in glibc). */
832
833 if (!is_thumb
834 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
835 == 0xe3e00a0f /* mov r0, #0xffff0fff */
836 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
837 == 0xe240f01f) /* sub pc, r0, #31 */
838 return 1;
839 }
840
841 return 0;
842 }
843
844 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
845 the first 16-bit of instruction, and INSN2 is the second 16-bit of
846 instruction. */
847 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
848 ((bits ((insn1), 0, 3) << 12) \
849 | (bits ((insn1), 10, 10) << 11) \
850 | (bits ((insn2), 12, 14) << 8) \
851 | bits ((insn2), 0, 7))
852
853 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
854 the 32-bit instruction. */
855 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
856 ((bits ((insn), 16, 19) << 12) \
857 | bits ((insn), 0, 11))
858
859 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
860
861 static unsigned int
862 thumb_expand_immediate (unsigned int imm)
863 {
864 unsigned int count = imm >> 7;
865
866 if (count < 8)
867 switch (count / 2)
868 {
869 case 0:
870 return imm & 0xff;
871 case 1:
872 return (imm & 0xff) | ((imm & 0xff) << 16);
873 case 2:
874 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
875 case 3:
876 return (imm & 0xff) | ((imm & 0xff) << 8)
877 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
878 }
879
880 return (0x80 | (imm & 0x7f)) << (32 - count);
881 }
882
883 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
884 epilogue, 0 otherwise. */
885
886 static int
887 thumb_instruction_restores_sp (unsigned short insn)
888 {
889 return (insn == 0x46bd /* mov sp, r7 */
890 || (insn & 0xff80) == 0xb000 /* add sp, imm */
891 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
892 }
893
894 /* Analyze a Thumb prologue, looking for a recognizable stack frame
895 and frame pointer. Scan until we encounter a store that could
896 clobber the stack frame unexpectedly, or an unknown instruction.
897 Return the last address which is definitely safe to skip for an
898 initial breakpoint. */
899
900 static CORE_ADDR
901 thumb_analyze_prologue (struct gdbarch *gdbarch,
902 CORE_ADDR start, CORE_ADDR limit,
903 struct arm_prologue_cache *cache)
904 {
905 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
906 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
907 int i;
908 pv_t regs[16];
909 CORE_ADDR offset;
910 CORE_ADDR unrecognized_pc = 0;
911
912 for (i = 0; i < 16; i++)
913 regs[i] = pv_register (i, 0);
914 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
915
916 while (start < limit)
917 {
918 unsigned short insn;
919 gdb::optional<bool> ra_signed_state;
920
921 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
922
923 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
924 {
925 int regno;
926 int mask;
927
928 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
929 break;
930
931 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
932 whether to save LR (R14). */
933 mask = (insn & 0xff) | ((insn & 0x100) << 6);
934
935 /* Calculate offsets of saved R0-R7 and LR. */
936 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
937 if (mask & (1 << regno))
938 {
939 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
940 -4);
941 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
942 }
943 }
944 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
945 {
946 offset = (insn & 0x7f) << 2; /* get scaled offset */
947 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
948 -offset);
949 }
950 else if (thumb_instruction_restores_sp (insn))
951 {
952 /* Don't scan past the epilogue. */
953 break;
954 }
955 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
956 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
957 (insn & 0xff) << 2);
958 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
959 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
960 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
961 bits (insn, 6, 8));
962 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
963 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
964 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
965 bits (insn, 0, 7));
966 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
967 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
968 && pv_is_constant (regs[bits (insn, 3, 5)]))
969 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
970 regs[bits (insn, 6, 8)]);
971 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
972 && pv_is_constant (regs[bits (insn, 3, 6)]))
973 {
974 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
975 int rm = bits (insn, 3, 6);
976 regs[rd] = pv_add (regs[rd], regs[rm]);
977 }
978 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
979 {
980 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
981 int src_reg = (insn & 0x78) >> 3;
982 regs[dst_reg] = regs[src_reg];
983 }
984 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
985 {
986 /* Handle stores to the stack. Normally pushes are used,
987 but with GCC -mtpcs-frame, there may be other stores
988 in the prologue to create the frame. */
989 int regno = (insn >> 8) & 0x7;
990 pv_t addr;
991
992 offset = (insn & 0xff) << 2;
993 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
994
995 if (stack.store_would_trash (addr))
996 break;
997
998 stack.store (addr, 4, regs[regno]);
999 }
1000 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1001 {
1002 int rd = bits (insn, 0, 2);
1003 int rn = bits (insn, 3, 5);
1004 pv_t addr;
1005
1006 offset = bits (insn, 6, 10) << 2;
1007 addr = pv_add_constant (regs[rn], offset);
1008
1009 if (stack.store_would_trash (addr))
1010 break;
1011
1012 stack.store (addr, 4, regs[rd]);
1013 }
1014 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1015 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1016 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1017 /* Ignore stores of argument registers to the stack. */
1018 ;
1019 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1020 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1021 /* Ignore block loads from the stack, potentially copying
1022 parameters from memory. */
1023 ;
1024 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1025 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1026 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1027 /* Similarly ignore single loads from the stack. */
1028 ;
1029 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1030 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1031 /* Skip register copies, i.e. saves to another register
1032 instead of the stack. */
1033 ;
1034 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1035 /* Recognize constant loads; even with small stacks these are necessary
1036 on Thumb. */
1037 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1038 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1039 {
1040 /* Constant pool loads, for the same reason. */
1041 unsigned int constant;
1042 CORE_ADDR loc;
1043
1044 loc = start + 4 + bits (insn, 0, 7) * 4;
1045 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1046 regs[bits (insn, 8, 10)] = pv_constant (constant);
1047 }
1048 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1049 {
1050 unsigned short inst2;
1051
1052 inst2 = read_code_unsigned_integer (start + 2, 2,
1053 byte_order_for_code);
1054 uint32_t whole_insn = (insn << 16) | inst2;
1055
1056 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1057 {
1058 /* BL, BLX. Allow some special function calls when
1059 skipping the prologue; GCC generates these before
1060 storing arguments to the stack. */
1061 CORE_ADDR nextpc;
1062 int j1, j2, imm1, imm2;
1063
1064 imm1 = sbits (insn, 0, 10);
1065 imm2 = bits (inst2, 0, 10);
1066 j1 = bit (inst2, 13);
1067 j2 = bit (inst2, 11);
1068
1069 offset = ((imm1 << 12) + (imm2 << 1));
1070 offset ^= ((!j2) << 22) | ((!j1) << 23);
1071
1072 nextpc = start + 4 + offset;
1073 /* For BLX make sure to clear the low bits. */
1074 if (bit (inst2, 12) == 0)
1075 nextpc = nextpc & 0xfffffffc;
1076
1077 if (!skip_prologue_function (gdbarch, nextpc,
1078 bit (inst2, 12) != 0))
1079 break;
1080 }
1081
1082 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1083 { registers } */
1084 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1085 {
1086 pv_t addr = regs[bits (insn, 0, 3)];
1087 int regno;
1088
1089 if (stack.store_would_trash (addr))
1090 break;
1091
1092 /* Calculate offsets of saved registers. */
1093 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1094 if (inst2 & (1 << regno))
1095 {
1096 addr = pv_add_constant (addr, -4);
1097 stack.store (addr, 4, regs[regno]);
1098 }
1099
1100 if (insn & 0x0020)
1101 regs[bits (insn, 0, 3)] = addr;
1102 }
1103
1104 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1105 else if ((insn & 0xff20) == 0xed20
1106 && (inst2 & 0x0f00) == 0x0b00
1107 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1108 {
1109 /* Address SP points to. */
1110 pv_t addr = regs[bits (insn, 0, 3)];
1111
1112 /* Number of registers saved. */
1113 unsigned int number = bits (inst2, 0, 7) >> 1;
1114
1115 /* First register to save. */
1116 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1117
1118 if (stack.store_would_trash (addr))
1119 break;
1120
1121 /* Calculate offsets of saved registers. */
1122 for (; number > 0; number--)
1123 {
1124 addr = pv_add_constant (addr, -8);
1125 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1126 + vd + number, 0));
1127 }
1128
1129 /* Writeback SP to account for the saved registers. */
1130 regs[bits (insn, 0, 3)] = addr;
1131 }
1132
1133 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1134 [Rn, #+/-imm]{!} */
1135 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1136 {
1137 int regno1 = bits (inst2, 12, 15);
1138 int regno2 = bits (inst2, 8, 11);
1139 pv_t addr = regs[bits (insn, 0, 3)];
1140
1141 offset = inst2 & 0xff;
1142 if (insn & 0x0080)
1143 addr = pv_add_constant (addr, offset);
1144 else
1145 addr = pv_add_constant (addr, -offset);
1146
1147 if (stack.store_would_trash (addr))
1148 break;
1149
1150 stack.store (addr, 4, regs[regno1]);
1151 stack.store (pv_add_constant (addr, 4),
1152 4, regs[regno2]);
1153
1154 if (insn & 0x0020)
1155 regs[bits (insn, 0, 3)] = addr;
1156 }
1157
1158 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1159 && (inst2 & 0x0c00) == 0x0c00
1160 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1161 {
1162 int regno = bits (inst2, 12, 15);
1163 pv_t addr = regs[bits (insn, 0, 3)];
1164
1165 offset = inst2 & 0xff;
1166 if (inst2 & 0x0200)
1167 addr = pv_add_constant (addr, offset);
1168 else
1169 addr = pv_add_constant (addr, -offset);
1170
1171 if (stack.store_would_trash (addr))
1172 break;
1173
1174 stack.store (addr, 4, regs[regno]);
1175
1176 if (inst2 & 0x0100)
1177 regs[bits (insn, 0, 3)] = addr;
1178 }
1179
1180 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1181 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1182 {
1183 int regno = bits (inst2, 12, 15);
1184 pv_t addr;
1185
1186 offset = inst2 & 0xfff;
1187 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1188
1189 if (stack.store_would_trash (addr))
1190 break;
1191
1192 stack.store (addr, 4, regs[regno]);
1193 }
1194
1195 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1196 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1197 /* Ignore stores of argument registers to the stack. */
1198 ;
1199
1200 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1201 && (inst2 & 0x0d00) == 0x0c00
1202 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1203 /* Ignore stores of argument registers to the stack. */
1204 ;
1205
1206 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1207 { registers } */
1208 && (inst2 & 0x8000) == 0x0000
1209 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1210 /* Ignore block loads from the stack, potentially copying
1211 parameters from memory. */
1212 ;
1213
1214 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1215 [Rn, #+/-imm] */
1216 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1217 /* Similarly ignore dual loads from the stack. */
1218 ;
1219
1220 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1221 && (inst2 & 0x0d00) == 0x0c00
1222 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1223 /* Similarly ignore single loads from the stack. */
1224 ;
1225
1226 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1227 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1228 /* Similarly ignore single loads from the stack. */
1229 ;
1230
1231 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1232 && (inst2 & 0x8000) == 0x0000)
1233 {
1234 unsigned int imm = ((bits (insn, 10, 10) << 11)
1235 | (bits (inst2, 12, 14) << 8)
1236 | bits (inst2, 0, 7));
1237
1238 regs[bits (inst2, 8, 11)]
1239 = pv_add_constant (regs[bits (insn, 0, 3)],
1240 thumb_expand_immediate (imm));
1241 }
1242
1243 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1244 && (inst2 & 0x8000) == 0x0000)
1245 {
1246 unsigned int imm = ((bits (insn, 10, 10) << 11)
1247 | (bits (inst2, 12, 14) << 8)
1248 | bits (inst2, 0, 7));
1249
1250 regs[bits (inst2, 8, 11)]
1251 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1252 }
1253
1254 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1255 && (inst2 & 0x8000) == 0x0000)
1256 {
1257 unsigned int imm = ((bits (insn, 10, 10) << 11)
1258 | (bits (inst2, 12, 14) << 8)
1259 | bits (inst2, 0, 7));
1260
1261 regs[bits (inst2, 8, 11)]
1262 = pv_add_constant (regs[bits (insn, 0, 3)],
1263 - (CORE_ADDR) thumb_expand_immediate (imm));
1264 }
1265
1266 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1267 && (inst2 & 0x8000) == 0x0000)
1268 {
1269 unsigned int imm = ((bits (insn, 10, 10) << 11)
1270 | (bits (inst2, 12, 14) << 8)
1271 | bits (inst2, 0, 7));
1272
1273 regs[bits (inst2, 8, 11)]
1274 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1275 }
1276
1277 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1278 {
1279 unsigned int imm = ((bits (insn, 10, 10) << 11)
1280 | (bits (inst2, 12, 14) << 8)
1281 | bits (inst2, 0, 7));
1282
1283 regs[bits (inst2, 8, 11)]
1284 = pv_constant (thumb_expand_immediate (imm));
1285 }
1286
1287 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1288 {
1289 unsigned int imm
1290 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1291
1292 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1293 }
1294
1295 else if (insn == 0xea5f /* mov.w Rd,Rm */
1296 && (inst2 & 0xf0f0) == 0)
1297 {
1298 int dst_reg = (inst2 & 0x0f00) >> 8;
1299 int src_reg = inst2 & 0xf;
1300 regs[dst_reg] = regs[src_reg];
1301 }
1302
1303 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1304 {
1305 /* Constant pool loads. */
1306 unsigned int constant;
1307 CORE_ADDR loc;
1308
1309 offset = bits (inst2, 0, 11);
1310 if (insn & 0x0080)
1311 loc = start + 4 + offset;
1312 else
1313 loc = start + 4 - offset;
1314
1315 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1316 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1317 }
1318
1319 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1320 {
1321 /* Constant pool loads. */
1322 unsigned int constant;
1323 CORE_ADDR loc;
1324
1325 offset = bits (inst2, 0, 7) << 2;
1326 if (insn & 0x0080)
1327 loc = start + 4 + offset;
1328 else
1329 loc = start + 4 - offset;
1330
1331 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1332 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1333
1334 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1335 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1336 }
1337 /* Start of ARMv8.1-m PACBTI extension instructions. */
1338 else if (IS_PAC (whole_insn))
1339 {
1340 /* LR and SP are input registers. PAC is in R12. LR is
1341 signed from this point onwards. NOP space. */
1342 ra_signed_state = true;
1343 }
1344 else if (IS_PACBTI (whole_insn))
1345 {
1346 /* LR and SP are input registers. PAC is in R12 and PC is a
1347 valid BTI landing pad. LR is signed from this point onwards.
1348 NOP space. */
1349 ra_signed_state = true;
1350 }
1351 else if (IS_BTI (whole_insn))
1352 {
1353 /* Valid BTI landing pad. NOP space. */
1354 }
1355 else if (IS_PACG (whole_insn))
1356 {
1357 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1358 this point onwards. */
1359 ra_signed_state = true;
1360 }
1361 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1362 {
1363 /* These instructions appear close to the epilogue, when signed
1364 pointers are getting authenticated. */
1365 ra_signed_state = false;
1366 }
1367 /* End of ARMv8.1-m PACBTI extension instructions */
1368 else if (thumb2_instruction_changes_pc (insn, inst2))
1369 {
1370 /* Don't scan past anything that might change control flow. */
1371 break;
1372 }
1373 else
1374 {
1375 /* The optimizer might shove anything into the prologue,
1376 so we just skip what we don't recognize. */
1377 unrecognized_pc = start;
1378 }
1379
1380 arm_gdbarch_tdep *tdep
1381 = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1382
1383 /* Make sure we are dealing with a target that supports ARMv8.1-m
1384 PACBTI. */
1385 if (cache != nullptr && tdep->have_pacbti
1386 && ra_signed_state.has_value ())
1387 {
1388 arm_debug_printf ("Found pacbti instruction at %s",
1389 paddress (gdbarch, start));
1390 arm_debug_printf ("RA is %s",
1391 *ra_signed_state? "signed" : "not signed");
1392 cache->ra_signed_state = ra_signed_state;
1393 }
1394
1395 start += 2;
1396 }
1397 else if (thumb_instruction_changes_pc (insn))
1398 {
1399 /* Don't scan past anything that might change control flow. */
1400 break;
1401 }
1402 else
1403 {
1404 /* The optimizer might shove anything into the prologue,
1405 so we just skip what we don't recognize. */
1406 unrecognized_pc = start;
1407 }
1408
1409 start += 2;
1410 }
1411
1412 arm_debug_printf ("Prologue scan stopped at %s",
1413 paddress (gdbarch, start));
1414
1415 if (unrecognized_pc == 0)
1416 unrecognized_pc = start;
1417
1418 if (cache == NULL)
1419 return unrecognized_pc;
1420
1421 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1422 {
1423 /* Frame pointer is fp. Frame size is constant. */
1424 cache->framereg = ARM_FP_REGNUM;
1425 cache->framesize = -regs[ARM_FP_REGNUM].k;
1426 }
1427 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1428 {
1429 /* Frame pointer is r7. Frame size is constant. */
1430 cache->framereg = THUMB_FP_REGNUM;
1431 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1432 }
1433 else
1434 {
1435 /* Try the stack pointer... this is a bit desperate. */
1436 cache->framereg = ARM_SP_REGNUM;
1437 cache->framesize = -regs[ARM_SP_REGNUM].k;
1438 }
1439
1440 for (i = 0; i < 16; i++)
1441 if (stack.find_reg (gdbarch, i, &offset))
1442 cache->saved_regs[i].set_addr (offset);
1443
1444 return unrecognized_pc;
1445 }
1446
1447
1448 /* Try to analyze the instructions starting from PC, which load symbol
1449 __stack_chk_guard. Return the address of instruction after loading this
1450 symbol, set the dest register number to *BASEREG, and set the size of
1451 instructions for loading symbol in OFFSET. Return 0 if instructions are
1452 not recognized. */
1453
1454 static CORE_ADDR
1455 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1456 unsigned int *destreg, int *offset)
1457 {
1458 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1459 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1460 unsigned int low, high, address;
1461
1462 address = 0;
1463 if (is_thumb)
1464 {
1465 unsigned short insn1
1466 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1467
1468 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1469 {
1470 *destreg = bits (insn1, 8, 10);
1471 *offset = 2;
1472 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1473 address = read_memory_unsigned_integer (address, 4,
1474 byte_order_for_code);
1475 }
1476 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1477 {
1478 unsigned short insn2
1479 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1480
1481 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1482
1483 insn1
1484 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1485 insn2
1486 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1487
1488 /* movt Rd, #const */
1489 if ((insn1 & 0xfbc0) == 0xf2c0)
1490 {
1491 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1492 *destreg = bits (insn2, 8, 11);
1493 *offset = 8;
1494 address = (high << 16 | low);
1495 }
1496 }
1497 }
1498 else
1499 {
1500 unsigned int insn
1501 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1502
1503 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1504 {
1505 address = bits (insn, 0, 11) + pc + 8;
1506 address = read_memory_unsigned_integer (address, 4,
1507 byte_order_for_code);
1508
1509 *destreg = bits (insn, 12, 15);
1510 *offset = 4;
1511 }
1512 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1513 {
1514 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1515
1516 insn
1517 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1518
1519 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1520 {
1521 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1522 *destreg = bits (insn, 12, 15);
1523 *offset = 8;
1524 address = (high << 16 | low);
1525 }
1526 }
1527 }
1528
1529 return address;
1530 }
1531
1532 /* Try to skip a sequence of instructions used for stack protector. If PC
1533 points to the first instruction of this sequence, return the address of
1534 first instruction after this sequence, otherwise, return original PC.
1535
1536 On arm, this sequence of instructions is composed of mainly three steps,
1537 Step 1: load symbol __stack_chk_guard,
1538 Step 2: load from address of __stack_chk_guard,
1539 Step 3: store it to somewhere else.
1540
1541 Usually, instructions on step 2 and step 3 are the same on various ARM
1542 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1543 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1544 instructions in step 1 vary from different ARM architectures. On ARMv7,
1545 they are,
1546
1547 movw Rn, #:lower16:__stack_chk_guard
1548 movt Rn, #:upper16:__stack_chk_guard
1549
1550 On ARMv5t, it is,
1551
1552 ldr Rn, .Label
1553 ....
1554 .Lable:
1555 .word __stack_chk_guard
1556
1557 Since ldr/str is a very popular instruction, we can't use them as
1558 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1559 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1560 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1561
1562 static CORE_ADDR
1563 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1564 {
1565 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1566 unsigned int basereg;
1567 struct bound_minimal_symbol stack_chk_guard;
1568 int offset;
1569 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1570 CORE_ADDR addr;
1571
1572 /* Try to parse the instructions in Step 1. */
1573 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1574 &basereg, &offset);
1575 if (!addr)
1576 return pc;
1577
1578 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1579 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1580 Otherwise, this sequence cannot be for stack protector. */
1581 if (stack_chk_guard.minsym == NULL
1582 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1583 return pc;
1584
1585 if (is_thumb)
1586 {
1587 unsigned int destreg;
1588 unsigned short insn
1589 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1590
1591 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1592 if ((insn & 0xf800) != 0x6800)
1593 return pc;
1594 if (bits (insn, 3, 5) != basereg)
1595 return pc;
1596 destreg = bits (insn, 0, 2);
1597
1598 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1599 byte_order_for_code);
1600 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1601 if ((insn & 0xf800) != 0x6000)
1602 return pc;
1603 if (destreg != bits (insn, 0, 2))
1604 return pc;
1605 }
1606 else
1607 {
1608 unsigned int destreg;
1609 unsigned int insn
1610 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1611
1612 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1613 if ((insn & 0x0e500000) != 0x04100000)
1614 return pc;
1615 if (bits (insn, 16, 19) != basereg)
1616 return pc;
1617 destreg = bits (insn, 12, 15);
1618 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1619 insn = read_code_unsigned_integer (pc + offset + 4,
1620 4, byte_order_for_code);
1621 if ((insn & 0x0e500000) != 0x04000000)
1622 return pc;
1623 if (bits (insn, 12, 15) != destreg)
1624 return pc;
1625 }
1626 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1627 on arm. */
1628 if (is_thumb)
1629 return pc + offset + 4;
1630 else
1631 return pc + offset + 8;
1632 }
1633
1634 /* Advance the PC across any function entry prologue instructions to
1635 reach some "real" code.
1636
1637 The APCS (ARM Procedure Call Standard) defines the following
1638 prologue:
1639
1640 mov ip, sp
1641 [stmfd sp!, {a1,a2,a3,a4}]
1642 stmfd sp!, {...,fp,ip,lr,pc}
1643 [stfe f7, [sp, #-12]!]
1644 [stfe f6, [sp, #-12]!]
1645 [stfe f5, [sp, #-12]!]
1646 [stfe f4, [sp, #-12]!]
1647 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1648
1649 static CORE_ADDR
1650 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1651 {
1652 CORE_ADDR func_addr, limit_pc;
1653
1654 /* See if we can determine the end of the prologue via the symbol table.
1655 If so, then return either PC, or the PC after the prologue, whichever
1656 is greater. */
1657 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1658 {
1659 CORE_ADDR post_prologue_pc
1660 = skip_prologue_using_sal (gdbarch, func_addr);
1661 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1662
1663 if (post_prologue_pc)
1664 post_prologue_pc
1665 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1666
1667
1668 /* GCC always emits a line note before the prologue and another
1669 one after, even if the two are at the same address or on the
1670 same line. Take advantage of this so that we do not need to
1671 know every instruction that might appear in the prologue. We
1672 will have producer information for most binaries; if it is
1673 missing (e.g. for -gstabs), assuming the GNU tools. */
1674 if (post_prologue_pc
1675 && (cust == NULL
1676 || cust->producer () == NULL
1677 || startswith (cust->producer (), "GNU ")
1678 || producer_is_llvm (cust->producer ())))
1679 return post_prologue_pc;
1680
1681 if (post_prologue_pc != 0)
1682 {
1683 CORE_ADDR analyzed_limit;
1684
1685 /* For non-GCC compilers, make sure the entire line is an
1686 acceptable prologue; GDB will round this function's
1687 return value up to the end of the following line so we
1688 can not skip just part of a line (and we do not want to).
1689
1690 RealView does not treat the prologue specially, but does
1691 associate prologue code with the opening brace; so this
1692 lets us skip the first line if we think it is the opening
1693 brace. */
1694 if (arm_pc_is_thumb (gdbarch, func_addr))
1695 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1696 post_prologue_pc, NULL);
1697 else
1698 analyzed_limit
1699 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1700 NULL, target_arm_instruction_reader ());
1701
1702 if (analyzed_limit != post_prologue_pc)
1703 return func_addr;
1704
1705 return post_prologue_pc;
1706 }
1707 }
1708
1709 /* Can't determine prologue from the symbol table, need to examine
1710 instructions. */
1711
1712 /* Find an upper limit on the function prologue using the debug
1713 information. If the debug information could not be used to provide
1714 that bound, then use an arbitrary large number as the upper bound. */
1715 /* Like arm_scan_prologue, stop no later than pc + 64. */
1716 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1717 if (limit_pc == 0)
1718 limit_pc = pc + 64; /* Magic. */
1719
1720
1721 /* Check if this is Thumb code. */
1722 if (arm_pc_is_thumb (gdbarch, pc))
1723 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1724 else
1725 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1726 target_arm_instruction_reader ());
1727 }
1728
1729 /* *INDENT-OFF* */
1730 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1731 This function decodes a Thumb function prologue to determine:
1732 1) the size of the stack frame
1733 2) which registers are saved on it
1734 3) the offsets of saved regs
1735 4) the offset from the stack pointer to the frame pointer
1736
1737 A typical Thumb function prologue would create this stack frame
1738 (offsets relative to FP)
1739 old SP -> 24 stack parameters
1740 20 LR
1741 16 R7
1742 R7 -> 0 local variables (16 bytes)
1743 SP -> -12 additional stack space (12 bytes)
1744 The frame size would thus be 36 bytes, and the frame offset would be
1745 12 bytes. The frame register is R7.
1746
1747 The comments for thumb_skip_prolog() describe the algorithm we use
1748 to detect the end of the prolog. */
1749 /* *INDENT-ON* */
1750
1751 static void
1752 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1753 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1754 {
1755 CORE_ADDR prologue_start;
1756 CORE_ADDR prologue_end;
1757
1758 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1759 &prologue_end))
1760 {
1761 /* See comment in arm_scan_prologue for an explanation of
1762 this heuristics. */
1763 if (prologue_end > prologue_start + 64)
1764 {
1765 prologue_end = prologue_start + 64;
1766 }
1767 }
1768 else
1769 /* We're in the boondocks: we have no idea where the start of the
1770 function is. */
1771 return;
1772
1773 prologue_end = std::min (prologue_end, prev_pc);
1774
1775 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1776 }
1777
1778 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1779 otherwise. */
1780
1781 static int
1782 arm_instruction_restores_sp (unsigned int insn)
1783 {
1784 if (bits (insn, 28, 31) != INST_NV)
1785 {
1786 if ((insn & 0x0df0f000) == 0x0080d000
1787 /* ADD SP (register or immediate). */
1788 || (insn & 0x0df0f000) == 0x0040d000
1789 /* SUB SP (register or immediate). */
1790 || (insn & 0x0ffffff0) == 0x01a0d000
1791 /* MOV SP. */
1792 || (insn & 0x0fff0000) == 0x08bd0000
1793 /* POP (LDMIA). */
1794 || (insn & 0x0fff0000) == 0x049d0000)
1795 /* POP of a single register. */
1796 return 1;
1797 }
1798
1799 return 0;
1800 }
1801
1802 /* Implement immediate value decoding, as described in section A5.2.4
1803 (Modified immediate constants in ARM instructions) of the ARM Architecture
1804 Reference Manual (ARMv7-A and ARMv7-R edition). */
1805
1806 static uint32_t
1807 arm_expand_immediate (uint32_t imm)
1808 {
1809 /* Immediate values are 12 bits long. */
1810 gdb_assert ((imm & 0xfffff000) == 0);
1811
1812 uint32_t unrotated_value = imm & 0xff;
1813 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1814
1815 if (rotate_amount == 0)
1816 return unrotated_value;
1817
1818 return ((unrotated_value >> rotate_amount)
1819 | (unrotated_value << (32 - rotate_amount)));
1820 }
1821
1822 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1823 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1824 fill it in. Return the first address not recognized as a prologue
1825 instruction.
1826
1827 We recognize all the instructions typically found in ARM prologues,
1828 plus harmless instructions which can be skipped (either for analysis
1829 purposes, or a more restrictive set that can be skipped when finding
1830 the end of the prologue). */
1831
1832 static CORE_ADDR
1833 arm_analyze_prologue (struct gdbarch *gdbarch,
1834 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1835 struct arm_prologue_cache *cache,
1836 const arm_instruction_reader &insn_reader)
1837 {
1838 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1839 int regno;
1840 CORE_ADDR offset, current_pc;
1841 pv_t regs[ARM_FPS_REGNUM];
1842 CORE_ADDR unrecognized_pc = 0;
1843 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1844
1845 /* Search the prologue looking for instructions that set up the
1846 frame pointer, adjust the stack pointer, and save registers.
1847
1848 Be careful, however, and if it doesn't look like a prologue,
1849 don't try to scan it. If, for instance, a frameless function
1850 begins with stmfd sp!, then we will tell ourselves there is
1851 a frame, which will confuse stack traceback, as well as "finish"
1852 and other operations that rely on a knowledge of the stack
1853 traceback. */
1854
1855 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1856 regs[regno] = pv_register (regno, 0);
1857 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1858
1859 for (current_pc = prologue_start;
1860 current_pc < prologue_end;
1861 current_pc += 4)
1862 {
1863 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1864
1865 if (insn == 0xe1a0c00d) /* mov ip, sp */
1866 {
1867 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1868 continue;
1869 }
1870 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1871 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1872 {
1873 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1874 int rd = bits (insn, 12, 15);
1875 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1876 continue;
1877 }
1878 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1879 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1880 {
1881 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1882 int rd = bits (insn, 12, 15);
1883 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1884 continue;
1885 }
1886 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1887 [sp, #-4]! */
1888 {
1889 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1890 break;
1891 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1892 stack.store (regs[ARM_SP_REGNUM], 4,
1893 regs[bits (insn, 12, 15)]);
1894 continue;
1895 }
1896 else if ((insn & 0xffff0000) == 0xe92d0000)
1897 /* stmfd sp!, {..., fp, ip, lr, pc}
1898 or
1899 stmfd sp!, {a1, a2, a3, a4} */
1900 {
1901 int mask = insn & 0xffff;
1902
1903 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1904 break;
1905
1906 /* Calculate offsets of saved registers. */
1907 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1908 if (mask & (1 << regno))
1909 {
1910 regs[ARM_SP_REGNUM]
1911 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1912 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1913 }
1914 }
1915 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1916 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1917 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1918 {
1919 /* No need to add this to saved_regs -- it's just an arg reg. */
1920 continue;
1921 }
1922 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1923 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1924 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1925 {
1926 /* No need to add this to saved_regs -- it's just an arg reg. */
1927 continue;
1928 }
1929 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1930 { registers } */
1931 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1932 {
1933 /* No need to add this to saved_regs -- it's just arg regs. */
1934 continue;
1935 }
1936 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1937 {
1938 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1939 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1940 }
1941 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1942 {
1943 uint32_t imm = arm_expand_immediate(insn & 0xfff);
1944 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1945 }
1946 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1947 [sp, -#c]! */
1948 && tdep->have_fpa_registers)
1949 {
1950 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1951 break;
1952
1953 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1954 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1955 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1956 }
1957 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1958 [sp!] */
1959 && tdep->have_fpa_registers)
1960 {
1961 int n_saved_fp_regs;
1962 unsigned int fp_start_reg, fp_bound_reg;
1963
1964 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1965 break;
1966
1967 if ((insn & 0x800) == 0x800) /* N0 is set */
1968 {
1969 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1970 n_saved_fp_regs = 3;
1971 else
1972 n_saved_fp_regs = 1;
1973 }
1974 else
1975 {
1976 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1977 n_saved_fp_regs = 2;
1978 else
1979 n_saved_fp_regs = 4;
1980 }
1981
1982 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1983 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1984 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1985 {
1986 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1987 stack.store (regs[ARM_SP_REGNUM], 12,
1988 regs[fp_start_reg++]);
1989 }
1990 }
1991 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1992 {
1993 /* Allow some special function calls when skipping the
1994 prologue; GCC generates these before storing arguments to
1995 the stack. */
1996 CORE_ADDR dest = BranchDest (current_pc, insn);
1997
1998 if (skip_prologue_function (gdbarch, dest, 0))
1999 continue;
2000 else
2001 break;
2002 }
2003 else if ((insn & 0xf0000000) != 0xe0000000)
2004 break; /* Condition not true, exit early. */
2005 else if (arm_instruction_changes_pc (insn))
2006 /* Don't scan past anything that might change control flow. */
2007 break;
2008 else if (arm_instruction_restores_sp (insn))
2009 {
2010 /* Don't scan past the epilogue. */
2011 break;
2012 }
2013 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2014 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2015 /* Ignore block loads from the stack, potentially copying
2016 parameters from memory. */
2017 continue;
2018 else if ((insn & 0xfc500000) == 0xe4100000
2019 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2020 /* Similarly ignore single loads from the stack. */
2021 continue;
2022 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2023 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2024 register instead of the stack. */
2025 continue;
2026 else
2027 {
2028 /* The optimizer might shove anything into the prologue, if
2029 we build up cache (cache != NULL) from scanning prologue,
2030 we just skip what we don't recognize and scan further to
2031 make cache as complete as possible. However, if we skip
2032 prologue, we'll stop immediately on unrecognized
2033 instruction. */
2034 unrecognized_pc = current_pc;
2035 if (cache != NULL)
2036 continue;
2037 else
2038 break;
2039 }
2040 }
2041
2042 if (unrecognized_pc == 0)
2043 unrecognized_pc = current_pc;
2044
2045 if (cache)
2046 {
2047 int framereg, framesize;
2048
2049 /* The frame size is just the distance from the frame register
2050 to the original stack pointer. */
2051 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2052 {
2053 /* Frame pointer is fp. */
2054 framereg = ARM_FP_REGNUM;
2055 framesize = -regs[ARM_FP_REGNUM].k;
2056 }
2057 else
2058 {
2059 /* Try the stack pointer... this is a bit desperate. */
2060 framereg = ARM_SP_REGNUM;
2061 framesize = -regs[ARM_SP_REGNUM].k;
2062 }
2063
2064 cache->framereg = framereg;
2065 cache->framesize = framesize;
2066
2067 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2068 if (stack.find_reg (gdbarch, regno, &offset))
2069 cache->saved_regs[regno].set_addr (offset);
2070 }
2071
2072 arm_debug_printf ("Prologue scan stopped at %s",
2073 paddress (gdbarch, unrecognized_pc));
2074
2075 return unrecognized_pc;
2076 }
2077
2078 static void
2079 arm_scan_prologue (struct frame_info *this_frame,
2080 struct arm_prologue_cache *cache)
2081 {
2082 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2083 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2084 CORE_ADDR prologue_start, prologue_end;
2085 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2086 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2087 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2088
2089 /* Assume there is no frame until proven otherwise. */
2090 cache->framereg = ARM_SP_REGNUM;
2091 cache->framesize = 0;
2092
2093 /* Check for Thumb prologue. */
2094 if (arm_frame_is_thumb (this_frame))
2095 {
2096 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2097 return;
2098 }
2099
2100 /* Find the function prologue. If we can't find the function in
2101 the symbol table, peek in the stack frame to find the PC. */
2102 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2103 &prologue_end))
2104 {
2105 /* One way to find the end of the prologue (which works well
2106 for unoptimized code) is to do the following:
2107
2108 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2109
2110 if (sal.line == 0)
2111 prologue_end = prev_pc;
2112 else if (sal.end < prologue_end)
2113 prologue_end = sal.end;
2114
2115 This mechanism is very accurate so long as the optimizer
2116 doesn't move any instructions from the function body into the
2117 prologue. If this happens, sal.end will be the last
2118 instruction in the first hunk of prologue code just before
2119 the first instruction that the scheduler has moved from
2120 the body to the prologue.
2121
2122 In order to make sure that we scan all of the prologue
2123 instructions, we use a slightly less accurate mechanism which
2124 may scan more than necessary. To help compensate for this
2125 lack of accuracy, the prologue scanning loop below contains
2126 several clauses which'll cause the loop to terminate early if
2127 an implausible prologue instruction is encountered.
2128
2129 The expression
2130
2131 prologue_start + 64
2132
2133 is a suitable endpoint since it accounts for the largest
2134 possible prologue plus up to five instructions inserted by
2135 the scheduler. */
2136
2137 if (prologue_end > prologue_start + 64)
2138 {
2139 prologue_end = prologue_start + 64; /* See above. */
2140 }
2141 }
2142 else
2143 {
2144 /* We have no symbol information. Our only option is to assume this
2145 function has a standard stack frame and the normal frame register.
2146 Then, we can find the value of our frame pointer on entrance to
2147 the callee (or at the present moment if this is the innermost frame).
2148 The value stored there should be the address of the stmfd + 8. */
2149 CORE_ADDR frame_loc;
2150 ULONGEST return_value;
2151
2152 /* AAPCS does not use a frame register, so we can abort here. */
2153 if (tdep->arm_abi == ARM_ABI_AAPCS)
2154 return;
2155
2156 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2157 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2158 &return_value))
2159 return;
2160 else
2161 {
2162 prologue_start = gdbarch_addr_bits_remove
2163 (gdbarch, return_value) - 8;
2164 prologue_end = prologue_start + 64; /* See above. */
2165 }
2166 }
2167
2168 if (prev_pc < prologue_end)
2169 prologue_end = prev_pc;
2170
2171 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2172 target_arm_instruction_reader ());
2173 }
2174
2175 static struct arm_prologue_cache *
2176 arm_make_prologue_cache (struct frame_info *this_frame)
2177 {
2178 int reg;
2179 struct arm_prologue_cache *cache;
2180 CORE_ADDR unwound_fp;
2181
2182 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2183 arm_cache_init (cache, this_frame);
2184
2185 arm_scan_prologue (this_frame, cache);
2186
2187 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2188 if (unwound_fp == 0)
2189 return cache;
2190
2191 arm_gdbarch_tdep *tdep =
2192 (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2193
2194 arm_cache_set_active_sp_value (cache, tdep, unwound_fp + cache->framesize);
2195
2196 /* Calculate actual addresses of saved registers using offsets
2197 determined by arm_scan_prologue. */
2198 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2199 if (cache->saved_regs[reg].is_addr ())
2200 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2201 + arm_cache_get_prev_sp_value (cache, tdep));
2202
2203 return cache;
2204 }
2205
2206 /* Implementation of the stop_reason hook for arm_prologue frames. */
2207
2208 static enum unwind_stop_reason
2209 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2210 void **this_cache)
2211 {
2212 struct arm_prologue_cache *cache;
2213 CORE_ADDR pc;
2214
2215 if (*this_cache == NULL)
2216 *this_cache = arm_make_prologue_cache (this_frame);
2217 cache = (struct arm_prologue_cache *) *this_cache;
2218
2219 /* This is meant to halt the backtrace at "_start". */
2220 pc = get_frame_pc (this_frame);
2221 gdbarch *arch = get_frame_arch (this_frame);
2222 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (arch);
2223 if (pc <= tdep->lowest_pc)
2224 return UNWIND_OUTERMOST;
2225
2226 /* If we've hit a wall, stop. */
2227 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2228 return UNWIND_OUTERMOST;
2229
2230 return UNWIND_NO_REASON;
2231 }
2232
2233 /* Our frame ID for a normal frame is the current function's starting PC
2234 and the caller's SP when we were called. */
2235
2236 static void
2237 arm_prologue_this_id (struct frame_info *this_frame,
2238 void **this_cache,
2239 struct frame_id *this_id)
2240 {
2241 struct arm_prologue_cache *cache;
2242 struct frame_id id;
2243 CORE_ADDR pc, func;
2244
2245 if (*this_cache == NULL)
2246 *this_cache = arm_make_prologue_cache (this_frame);
2247 cache = (struct arm_prologue_cache *) *this_cache;
2248
2249 arm_gdbarch_tdep *tdep
2250 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2251
2252 /* Use function start address as part of the frame ID. If we cannot
2253 identify the start address (due to missing symbol information),
2254 fall back to just using the current PC. */
2255 pc = get_frame_pc (this_frame);
2256 func = get_frame_func (this_frame);
2257 if (!func)
2258 func = pc;
2259
2260 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2261 *this_id = id;
2262 }
2263
2264 static struct value *
2265 arm_prologue_prev_register (struct frame_info *this_frame,
2266 void **this_cache,
2267 int prev_regnum)
2268 {
2269 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2270 struct arm_prologue_cache *cache;
2271 CORE_ADDR sp_value;
2272
2273 if (*this_cache == NULL)
2274 *this_cache = arm_make_prologue_cache (this_frame);
2275 cache = (struct arm_prologue_cache *) *this_cache;
2276
2277 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2278
2279 /* If this frame has signed the return address, mark it as so. */
2280 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2281 && *cache->ra_signed_state)
2282 set_frame_previous_pc_masked (this_frame);
2283
2284 /* If we are asked to unwind the PC, then we need to return the LR
2285 instead. The prologue may save PC, but it will point into this
2286 frame's prologue, not the next frame's resume location. Also
2287 strip the saved T bit. A valid LR may have the low bit set, but
2288 a valid PC never does. */
2289 if (prev_regnum == ARM_PC_REGNUM)
2290 {
2291 CORE_ADDR lr;
2292
2293 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2294 return frame_unwind_got_constant (this_frame, prev_regnum,
2295 arm_addr_bits_remove (gdbarch, lr));
2296 }
2297
2298 /* SP is generally not saved to the stack, but this frame is
2299 identified by the next frame's stack pointer at the time of the call.
2300 The value was already reconstructed into PREV_SP. */
2301 if (prev_regnum == ARM_SP_REGNUM)
2302 return frame_unwind_got_constant (this_frame, prev_regnum,
2303 arm_cache_get_prev_sp_value (cache, tdep));
2304
2305 /* The value might be one of the alternative SP, if so, use the
2306 value already constructed. */
2307 if (arm_cache_is_sp_register (cache, tdep, prev_regnum))
2308 {
2309 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2310 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2311 }
2312
2313 /* The CPSR may have been changed by the call instruction and by the
2314 called function. The only bit we can reconstruct is the T bit,
2315 by checking the low bit of LR as of the call. This is a reliable
2316 indicator of Thumb-ness except for some ARM v4T pre-interworking
2317 Thumb code, which could get away with a clear low bit as long as
2318 the called function did not use bx. Guess that all other
2319 bits are unchanged; the condition flags are presumably lost,
2320 but the processor status is likely valid. */
2321 if (prev_regnum == ARM_PS_REGNUM)
2322 {
2323 CORE_ADDR lr, cpsr;
2324 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2325
2326 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2327 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2328 if (IS_THUMB_ADDR (lr))
2329 cpsr |= t_bit;
2330 else
2331 cpsr &= ~t_bit;
2332 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2333 }
2334
2335 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2336 prev_regnum);
2337 }
2338
2339 static frame_unwind arm_prologue_unwind = {
2340 "arm prologue",
2341 NORMAL_FRAME,
2342 arm_prologue_unwind_stop_reason,
2343 arm_prologue_this_id,
2344 arm_prologue_prev_register,
2345 NULL,
2346 default_frame_sniffer
2347 };
2348
2349 /* Maintain a list of ARM exception table entries per objfile, similar to the
2350 list of mapping symbols. We only cache entries for standard ARM-defined
2351 personality routines; the cache will contain only the frame unwinding
2352 instructions associated with the entry (not the descriptors). */
2353
2354 struct arm_exidx_entry
2355 {
2356 CORE_ADDR addr;
2357 gdb_byte *entry;
2358
2359 bool operator< (const arm_exidx_entry &other) const
2360 {
2361 return addr < other.addr;
2362 }
2363 };
2364
2365 struct arm_exidx_data
2366 {
2367 std::vector<std::vector<arm_exidx_entry>> section_maps;
2368 };
2369
2370 /* Per-BFD key to store exception handling information. */
2371 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2372
2373 static struct obj_section *
2374 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2375 {
2376 struct obj_section *osect;
2377
2378 ALL_OBJFILE_OSECTIONS (objfile, osect)
2379 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2380 {
2381 bfd_vma start, size;
2382 start = bfd_section_vma (osect->the_bfd_section);
2383 size = bfd_section_size (osect->the_bfd_section);
2384
2385 if (start <= vma && vma < start + size)
2386 return osect;
2387 }
2388
2389 return NULL;
2390 }
2391
2392 /* Parse contents of exception table and exception index sections
2393 of OBJFILE, and fill in the exception table entry cache.
2394
2395 For each entry that refers to a standard ARM-defined personality
2396 routine, extract the frame unwinding instructions (from either
2397 the index or the table section). The unwinding instructions
2398 are normalized by:
2399 - extracting them from the rest of the table data
2400 - converting to host endianness
2401 - appending the implicit 0xb0 ("Finish") code
2402
2403 The extracted and normalized instructions are stored for later
2404 retrieval by the arm_find_exidx_entry routine. */
2405
2406 static void
2407 arm_exidx_new_objfile (struct objfile *objfile)
2408 {
2409 struct arm_exidx_data *data;
2410 asection *exidx, *extab;
2411 bfd_vma exidx_vma = 0, extab_vma = 0;
2412 LONGEST i;
2413
2414 /* If we've already touched this file, do nothing. */
2415 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2416 return;
2417
2418 /* Read contents of exception table and index. */
2419 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2420 gdb::byte_vector exidx_data;
2421 if (exidx)
2422 {
2423 exidx_vma = bfd_section_vma (exidx);
2424 exidx_data.resize (bfd_section_size (exidx));
2425
2426 if (!bfd_get_section_contents (objfile->obfd, exidx,
2427 exidx_data.data (), 0,
2428 exidx_data.size ()))
2429 return;
2430 }
2431
2432 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2433 gdb::byte_vector extab_data;
2434 if (extab)
2435 {
2436 extab_vma = bfd_section_vma (extab);
2437 extab_data.resize (bfd_section_size (extab));
2438
2439 if (!bfd_get_section_contents (objfile->obfd, extab,
2440 extab_data.data (), 0,
2441 extab_data.size ()))
2442 return;
2443 }
2444
2445 /* Allocate exception table data structure. */
2446 data = arm_exidx_data_key.emplace (objfile->obfd);
2447 data->section_maps.resize (objfile->obfd->section_count);
2448
2449 /* Fill in exception table. */
2450 for (i = 0; i < exidx_data.size () / 8; i++)
2451 {
2452 struct arm_exidx_entry new_exidx_entry;
2453 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2454 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2455 exidx_data.data () + i * 8 + 4);
2456 bfd_vma addr = 0, word = 0;
2457 int n_bytes = 0, n_words = 0;
2458 struct obj_section *sec;
2459 gdb_byte *entry = NULL;
2460
2461 /* Extract address of start of function. */
2462 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2463 idx += exidx_vma + i * 8;
2464
2465 /* Find section containing function and compute section offset. */
2466 sec = arm_obj_section_from_vma (objfile, idx);
2467 if (sec == NULL)
2468 continue;
2469 idx -= bfd_section_vma (sec->the_bfd_section);
2470
2471 /* Determine address of exception table entry. */
2472 if (val == 1)
2473 {
2474 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2475 }
2476 else if ((val & 0xff000000) == 0x80000000)
2477 {
2478 /* Exception table entry embedded in .ARM.exidx
2479 -- must be short form. */
2480 word = val;
2481 n_bytes = 3;
2482 }
2483 else if (!(val & 0x80000000))
2484 {
2485 /* Exception table entry in .ARM.extab. */
2486 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2487 addr += exidx_vma + i * 8 + 4;
2488
2489 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2490 {
2491 word = bfd_h_get_32 (objfile->obfd,
2492 extab_data.data () + addr - extab_vma);
2493 addr += 4;
2494
2495 if ((word & 0xff000000) == 0x80000000)
2496 {
2497 /* Short form. */
2498 n_bytes = 3;
2499 }
2500 else if ((word & 0xff000000) == 0x81000000
2501 || (word & 0xff000000) == 0x82000000)
2502 {
2503 /* Long form. */
2504 n_bytes = 2;
2505 n_words = ((word >> 16) & 0xff);
2506 }
2507 else if (!(word & 0x80000000))
2508 {
2509 bfd_vma pers;
2510 struct obj_section *pers_sec;
2511 int gnu_personality = 0;
2512
2513 /* Custom personality routine. */
2514 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2515 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2516
2517 /* Check whether we've got one of the variants of the
2518 GNU personality routines. */
2519 pers_sec = arm_obj_section_from_vma (objfile, pers);
2520 if (pers_sec)
2521 {
2522 static const char *personality[] =
2523 {
2524 "__gcc_personality_v0",
2525 "__gxx_personality_v0",
2526 "__gcj_personality_v0",
2527 "__gnu_objc_personality_v0",
2528 NULL
2529 };
2530
2531 CORE_ADDR pc = pers + pers_sec->offset ();
2532 int k;
2533
2534 for (k = 0; personality[k]; k++)
2535 if (lookup_minimal_symbol_by_pc_name
2536 (pc, personality[k], objfile))
2537 {
2538 gnu_personality = 1;
2539 break;
2540 }
2541 }
2542
2543 /* If so, the next word contains a word count in the high
2544 byte, followed by the same unwind instructions as the
2545 pre-defined forms. */
2546 if (gnu_personality
2547 && addr + 4 <= extab_vma + extab_data.size ())
2548 {
2549 word = bfd_h_get_32 (objfile->obfd,
2550 (extab_data.data ()
2551 + addr - extab_vma));
2552 addr += 4;
2553 n_bytes = 3;
2554 n_words = ((word >> 24) & 0xff);
2555 }
2556 }
2557 }
2558 }
2559
2560 /* Sanity check address. */
2561 if (n_words)
2562 if (addr < extab_vma
2563 || addr + 4 * n_words > extab_vma + extab_data.size ())
2564 n_words = n_bytes = 0;
2565
2566 /* The unwind instructions reside in WORD (only the N_BYTES least
2567 significant bytes are valid), followed by N_WORDS words in the
2568 extab section starting at ADDR. */
2569 if (n_bytes || n_words)
2570 {
2571 gdb_byte *p = entry
2572 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2573 n_bytes + n_words * 4 + 1);
2574
2575 while (n_bytes--)
2576 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2577
2578 while (n_words--)
2579 {
2580 word = bfd_h_get_32 (objfile->obfd,
2581 extab_data.data () + addr - extab_vma);
2582 addr += 4;
2583
2584 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2585 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2586 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2587 *p++ = (gdb_byte) (word & 0xff);
2588 }
2589
2590 /* Implied "Finish" to terminate the list. */
2591 *p++ = 0xb0;
2592 }
2593
2594 /* Push entry onto vector. They are guaranteed to always
2595 appear in order of increasing addresses. */
2596 new_exidx_entry.addr = idx;
2597 new_exidx_entry.entry = entry;
2598 data->section_maps[sec->the_bfd_section->index].push_back
2599 (new_exidx_entry);
2600 }
2601 }
2602
2603 /* Search for the exception table entry covering MEMADDR. If one is found,
2604 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2605 set *START to the start of the region covered by this entry. */
2606
2607 static gdb_byte *
2608 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2609 {
2610 struct obj_section *sec;
2611
2612 sec = find_pc_section (memaddr);
2613 if (sec != NULL)
2614 {
2615 struct arm_exidx_data *data;
2616 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2617
2618 data = arm_exidx_data_key.get (sec->objfile->obfd);
2619 if (data != NULL)
2620 {
2621 std::vector<arm_exidx_entry> &map
2622 = data->section_maps[sec->the_bfd_section->index];
2623 if (!map.empty ())
2624 {
2625 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2626
2627 /* std::lower_bound finds the earliest ordered insertion
2628 point. If the following symbol starts at this exact
2629 address, we use that; otherwise, the preceding
2630 exception table entry covers this address. */
2631 if (idx < map.end ())
2632 {
2633 if (idx->addr == map_key.addr)
2634 {
2635 if (start)
2636 *start = idx->addr + sec->addr ();
2637 return idx->entry;
2638 }
2639 }
2640
2641 if (idx > map.begin ())
2642 {
2643 idx = idx - 1;
2644 if (start)
2645 *start = idx->addr + sec->addr ();
2646 return idx->entry;
2647 }
2648 }
2649 }
2650 }
2651
2652 return NULL;
2653 }
2654
2655 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2656 instruction list from the ARM exception table entry ENTRY, allocate and
2657 return a prologue cache structure describing how to unwind this frame.
2658
2659 Return NULL if the unwinding instruction list contains a "spare",
2660 "reserved" or "refuse to unwind" instruction as defined in section
2661 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2662 for the ARM Architecture" document. */
2663
2664 static struct arm_prologue_cache *
2665 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2666 {
2667 CORE_ADDR vsp = 0;
2668 int vsp_valid = 0;
2669
2670 struct arm_prologue_cache *cache;
2671 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2672 arm_cache_init (cache, this_frame);
2673
2674 for (;;)
2675 {
2676 gdb_byte insn;
2677
2678 /* Whenever we reload SP, we actually have to retrieve its
2679 actual value in the current frame. */
2680 if (!vsp_valid)
2681 {
2682 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2683 {
2684 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2685 vsp = get_frame_register_unsigned (this_frame, reg);
2686 }
2687 else
2688 {
2689 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2690 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2691 }
2692
2693 vsp_valid = 1;
2694 }
2695
2696 /* Decode next unwind instruction. */
2697 insn = *entry++;
2698
2699 if ((insn & 0xc0) == 0)
2700 {
2701 int offset = insn & 0x3f;
2702 vsp += (offset << 2) + 4;
2703 }
2704 else if ((insn & 0xc0) == 0x40)
2705 {
2706 int offset = insn & 0x3f;
2707 vsp -= (offset << 2) + 4;
2708 }
2709 else if ((insn & 0xf0) == 0x80)
2710 {
2711 int mask = ((insn & 0xf) << 8) | *entry++;
2712 int i;
2713
2714 /* The special case of an all-zero mask identifies
2715 "Refuse to unwind". We return NULL to fall back
2716 to the prologue analyzer. */
2717 if (mask == 0)
2718 return NULL;
2719
2720 /* Pop registers r4..r15 under mask. */
2721 for (i = 0; i < 12; i++)
2722 if (mask & (1 << i))
2723 {
2724 cache->saved_regs[4 + i].set_addr (vsp);
2725 vsp += 4;
2726 }
2727
2728 /* Special-case popping SP -- we need to reload vsp. */
2729 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2730 vsp_valid = 0;
2731 }
2732 else if ((insn & 0xf0) == 0x90)
2733 {
2734 int reg = insn & 0xf;
2735
2736 /* Reserved cases. */
2737 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2738 return NULL;
2739
2740 /* Set SP from another register and mark VSP for reload. */
2741 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2742 vsp_valid = 0;
2743 }
2744 else if ((insn & 0xf0) == 0xa0)
2745 {
2746 int count = insn & 0x7;
2747 int pop_lr = (insn & 0x8) != 0;
2748 int i;
2749
2750 /* Pop r4..r[4+count]. */
2751 for (i = 0; i <= count; i++)
2752 {
2753 cache->saved_regs[4 + i].set_addr (vsp);
2754 vsp += 4;
2755 }
2756
2757 /* If indicated by flag, pop LR as well. */
2758 if (pop_lr)
2759 {
2760 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2761 vsp += 4;
2762 }
2763 }
2764 else if (insn == 0xb0)
2765 {
2766 /* We could only have updated PC by popping into it; if so, it
2767 will show up as address. Otherwise, copy LR into PC. */
2768 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2769 cache->saved_regs[ARM_PC_REGNUM]
2770 = cache->saved_regs[ARM_LR_REGNUM];
2771
2772 /* We're done. */
2773 break;
2774 }
2775 else if (insn == 0xb1)
2776 {
2777 int mask = *entry++;
2778 int i;
2779
2780 /* All-zero mask and mask >= 16 is "spare". */
2781 if (mask == 0 || mask >= 16)
2782 return NULL;
2783
2784 /* Pop r0..r3 under mask. */
2785 for (i = 0; i < 4; i++)
2786 if (mask & (1 << i))
2787 {
2788 cache->saved_regs[i].set_addr (vsp);
2789 vsp += 4;
2790 }
2791 }
2792 else if (insn == 0xb2)
2793 {
2794 ULONGEST offset = 0;
2795 unsigned shift = 0;
2796
2797 do
2798 {
2799 offset |= (*entry & 0x7f) << shift;
2800 shift += 7;
2801 }
2802 while (*entry++ & 0x80);
2803
2804 vsp += 0x204 + (offset << 2);
2805 }
2806 else if (insn == 0xb3)
2807 {
2808 int start = *entry >> 4;
2809 int count = (*entry++) & 0xf;
2810 int i;
2811
2812 /* Only registers D0..D15 are valid here. */
2813 if (start + count >= 16)
2814 return NULL;
2815
2816 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2817 for (i = 0; i <= count; i++)
2818 {
2819 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2820 vsp += 8;
2821 }
2822
2823 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2824 vsp += 4;
2825 }
2826 else if ((insn & 0xf8) == 0xb8)
2827 {
2828 int count = insn & 0x7;
2829 int i;
2830
2831 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2832 for (i = 0; i <= count; i++)
2833 {
2834 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2835 vsp += 8;
2836 }
2837
2838 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2839 vsp += 4;
2840 }
2841 else if (insn == 0xc6)
2842 {
2843 int start = *entry >> 4;
2844 int count = (*entry++) & 0xf;
2845 int i;
2846
2847 /* Only registers WR0..WR15 are valid. */
2848 if (start + count >= 16)
2849 return NULL;
2850
2851 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2852 for (i = 0; i <= count; i++)
2853 {
2854 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2855 vsp += 8;
2856 }
2857 }
2858 else if (insn == 0xc7)
2859 {
2860 int mask = *entry++;
2861 int i;
2862
2863 /* All-zero mask and mask >= 16 is "spare". */
2864 if (mask == 0 || mask >= 16)
2865 return NULL;
2866
2867 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2868 for (i = 0; i < 4; i++)
2869 if (mask & (1 << i))
2870 {
2871 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2872 vsp += 4;
2873 }
2874 }
2875 else if ((insn & 0xf8) == 0xc0)
2876 {
2877 int count = insn & 0x7;
2878 int i;
2879
2880 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2881 for (i = 0; i <= count; i++)
2882 {
2883 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2884 vsp += 8;
2885 }
2886 }
2887 else if (insn == 0xc8)
2888 {
2889 int start = *entry >> 4;
2890 int count = (*entry++) & 0xf;
2891 int i;
2892
2893 /* Only registers D0..D31 are valid. */
2894 if (start + count >= 16)
2895 return NULL;
2896
2897 /* Pop VFP double-precision registers
2898 D[16+start]..D[16+start+count]. */
2899 for (i = 0; i <= count; i++)
2900 {
2901 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2902 vsp += 8;
2903 }
2904 }
2905 else if (insn == 0xc9)
2906 {
2907 int start = *entry >> 4;
2908 int count = (*entry++) & 0xf;
2909 int i;
2910
2911 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2912 for (i = 0; i <= count; i++)
2913 {
2914 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2915 vsp += 8;
2916 }
2917 }
2918 else if ((insn & 0xf8) == 0xd0)
2919 {
2920 int count = insn & 0x7;
2921 int i;
2922
2923 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2924 for (i = 0; i <= count; i++)
2925 {
2926 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2927 vsp += 8;
2928 }
2929 }
2930 else
2931 {
2932 /* Everything else is "spare". */
2933 return NULL;
2934 }
2935 }
2936
2937 /* If we restore SP from a register, assume this was the frame register.
2938 Otherwise just fall back to SP as frame register. */
2939 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2940 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2941 else
2942 cache->framereg = ARM_SP_REGNUM;
2943
2944 /* Determine offset to previous frame. */
2945 cache->framesize
2946 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2947
2948 /* We already got the previous SP. */
2949 arm_gdbarch_tdep *tdep
2950 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2951 arm_cache_set_active_sp_value (cache, tdep, vsp);
2952
2953 return cache;
2954 }
2955
2956 /* Unwinding via ARM exception table entries. Note that the sniffer
2957 already computes a filled-in prologue cache, which is then used
2958 with the same arm_prologue_this_id and arm_prologue_prev_register
2959 routines also used for prologue-parsing based unwinding. */
2960
2961 static int
2962 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2963 struct frame_info *this_frame,
2964 void **this_prologue_cache)
2965 {
2966 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2967 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2968 CORE_ADDR addr_in_block, exidx_region, func_start;
2969 struct arm_prologue_cache *cache;
2970 gdb_byte *entry;
2971
2972 /* See if we have an ARM exception table entry covering this address. */
2973 addr_in_block = get_frame_address_in_block (this_frame);
2974 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2975 if (!entry)
2976 return 0;
2977
2978 /* The ARM exception table does not describe unwind information
2979 for arbitrary PC values, but is guaranteed to be correct only
2980 at call sites. We have to decide here whether we want to use
2981 ARM exception table information for this frame, or fall back
2982 to using prologue parsing. (Note that if we have DWARF CFI,
2983 this sniffer isn't even called -- CFI is always preferred.)
2984
2985 Before we make this decision, however, we check whether we
2986 actually have *symbol* information for the current frame.
2987 If not, prologue parsing would not work anyway, so we might
2988 as well use the exception table and hope for the best. */
2989 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2990 {
2991 int exc_valid = 0;
2992
2993 /* If the next frame is "normal", we are at a call site in this
2994 frame, so exception information is guaranteed to be valid. */
2995 if (get_next_frame (this_frame)
2996 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2997 exc_valid = 1;
2998
2999 /* We also assume exception information is valid if we're currently
3000 blocked in a system call. The system library is supposed to
3001 ensure this, so that e.g. pthread cancellation works. */
3002 if (arm_frame_is_thumb (this_frame))
3003 {
3004 ULONGEST insn;
3005
3006 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
3007 2, byte_order_for_code, &insn)
3008 && (insn & 0xff00) == 0xdf00 /* svc */)
3009 exc_valid = 1;
3010 }
3011 else
3012 {
3013 ULONGEST insn;
3014
3015 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
3016 4, byte_order_for_code, &insn)
3017 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3018 exc_valid = 1;
3019 }
3020
3021 /* Bail out if we don't know that exception information is valid. */
3022 if (!exc_valid)
3023 return 0;
3024
3025 /* The ARM exception index does not mark the *end* of the region
3026 covered by the entry, and some functions will not have any entry.
3027 To correctly recognize the end of the covered region, the linker
3028 should have inserted dummy records with a CANTUNWIND marker.
3029
3030 Unfortunately, current versions of GNU ld do not reliably do
3031 this, and thus we may have found an incorrect entry above.
3032 As a (temporary) sanity check, we only use the entry if it
3033 lies *within* the bounds of the function. Note that this check
3034 might reject perfectly valid entries that just happen to cover
3035 multiple functions; therefore this check ought to be removed
3036 once the linker is fixed. */
3037 if (func_start > exidx_region)
3038 return 0;
3039 }
3040
3041 /* Decode the list of unwinding instructions into a prologue cache.
3042 Note that this may fail due to e.g. a "refuse to unwind" code. */
3043 cache = arm_exidx_fill_cache (this_frame, entry);
3044 if (!cache)
3045 return 0;
3046
3047 *this_prologue_cache = cache;
3048 return 1;
3049 }
3050
3051 struct frame_unwind arm_exidx_unwind = {
3052 "arm exidx",
3053 NORMAL_FRAME,
3054 default_frame_unwind_stop_reason,
3055 arm_prologue_this_id,
3056 arm_prologue_prev_register,
3057 NULL,
3058 arm_exidx_unwind_sniffer
3059 };
3060
3061 static struct arm_prologue_cache *
3062 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
3063 {
3064 struct arm_prologue_cache *cache;
3065 int reg;
3066
3067 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3068 arm_cache_init (cache, this_frame);
3069
3070 /* Still rely on the offset calculated from prologue. */
3071 arm_scan_prologue (this_frame, cache);
3072
3073 /* Since we are in epilogue, the SP has been restored. */
3074 arm_gdbarch_tdep *tdep
3075 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3076 arm_cache_set_active_sp_value (cache, tdep,
3077 get_frame_register_unsigned (this_frame,
3078 ARM_SP_REGNUM));
3079
3080 /* Calculate actual addresses of saved registers using offsets
3081 determined by arm_scan_prologue. */
3082 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3083 if (cache->saved_regs[reg].is_addr ())
3084 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3085 + arm_cache_get_prev_sp_value (cache, tdep));
3086
3087 return cache;
3088 }
3089
3090 /* Implementation of function hook 'this_id' in
3091 'struct frame_uwnind' for epilogue unwinder. */
3092
3093 static void
3094 arm_epilogue_frame_this_id (struct frame_info *this_frame,
3095 void **this_cache,
3096 struct frame_id *this_id)
3097 {
3098 struct arm_prologue_cache *cache;
3099 CORE_ADDR pc, func;
3100
3101 if (*this_cache == NULL)
3102 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3103 cache = (struct arm_prologue_cache *) *this_cache;
3104
3105 /* Use function start address as part of the frame ID. If we cannot
3106 identify the start address (due to missing symbol information),
3107 fall back to just using the current PC. */
3108 pc = get_frame_pc (this_frame);
3109 func = get_frame_func (this_frame);
3110 if (func == 0)
3111 func = pc;
3112
3113 arm_gdbarch_tdep *tdep
3114 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3115 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3116 }
3117
3118 /* Implementation of function hook 'prev_register' in
3119 'struct frame_uwnind' for epilogue unwinder. */
3120
3121 static struct value *
3122 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
3123 void **this_cache, int regnum)
3124 {
3125 if (*this_cache == NULL)
3126 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3127
3128 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3129 }
3130
3131 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3132 CORE_ADDR pc);
3133 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3134 CORE_ADDR pc);
3135
3136 /* Implementation of function hook 'sniffer' in
3137 'struct frame_uwnind' for epilogue unwinder. */
3138
3139 static int
3140 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3141 struct frame_info *this_frame,
3142 void **this_prologue_cache)
3143 {
3144 if (frame_relative_level (this_frame) == 0)
3145 {
3146 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3147 CORE_ADDR pc = get_frame_pc (this_frame);
3148
3149 if (arm_frame_is_thumb (this_frame))
3150 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3151 else
3152 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3153 }
3154 else
3155 return 0;
3156 }
3157
3158 /* Frame unwinder from epilogue. */
3159
3160 static const struct frame_unwind arm_epilogue_frame_unwind =
3161 {
3162 "arm epilogue",
3163 NORMAL_FRAME,
3164 default_frame_unwind_stop_reason,
3165 arm_epilogue_frame_this_id,
3166 arm_epilogue_frame_prev_register,
3167 NULL,
3168 arm_epilogue_frame_sniffer,
3169 };
3170
3171 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3172 trampoline, return the target PC. Otherwise return 0.
3173
3174 void call0a (char c, short s, int i, long l) {}
3175
3176 int main (void)
3177 {
3178 (*pointer_to_call0a) (c, s, i, l);
3179 }
3180
3181 Instead of calling a stub library function _call_via_xx (xx is
3182 the register name), GCC may inline the trampoline in the object
3183 file as below (register r2 has the address of call0a).
3184
3185 .global main
3186 .type main, %function
3187 ...
3188 bl .L1
3189 ...
3190 .size main, .-main
3191
3192 .L1:
3193 bx r2
3194
3195 The trampoline 'bx r2' doesn't belong to main. */
3196
3197 static CORE_ADDR
3198 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
3199 {
3200 /* The heuristics of recognizing such trampoline is that FRAME is
3201 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
3202 if (arm_frame_is_thumb (frame))
3203 {
3204 gdb_byte buf[2];
3205
3206 if (target_read_memory (pc, buf, 2) == 0)
3207 {
3208 struct gdbarch *gdbarch = get_frame_arch (frame);
3209 enum bfd_endian byte_order_for_code
3210 = gdbarch_byte_order_for_code (gdbarch);
3211 uint16_t insn
3212 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3213
3214 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3215 {
3216 CORE_ADDR dest
3217 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3218
3219 /* Clear the LSB so that gdb core sets step-resume
3220 breakpoint at the right address. */
3221 return UNMAKE_THUMB_ADDR (dest);
3222 }
3223 }
3224 }
3225
3226 return 0;
3227 }
3228
3229 static struct arm_prologue_cache *
3230 arm_make_stub_cache (struct frame_info *this_frame)
3231 {
3232 struct arm_prologue_cache *cache;
3233
3234 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3235 arm_cache_init (cache, this_frame);
3236
3237 arm_gdbarch_tdep *tdep
3238 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3239 arm_cache_set_active_sp_value (cache, tdep,
3240 get_frame_register_unsigned (this_frame,
3241 ARM_SP_REGNUM));
3242
3243 return cache;
3244 }
3245
3246 /* Our frame ID for a stub frame is the current SP and LR. */
3247
3248 static void
3249 arm_stub_this_id (struct frame_info *this_frame,
3250 void **this_cache,
3251 struct frame_id *this_id)
3252 {
3253 struct arm_prologue_cache *cache;
3254
3255 if (*this_cache == NULL)
3256 *this_cache = arm_make_stub_cache (this_frame);
3257 cache = (struct arm_prologue_cache *) *this_cache;
3258
3259 arm_gdbarch_tdep *tdep
3260 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3261 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3262 get_frame_pc (this_frame));
3263 }
3264
3265 static int
3266 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3267 struct frame_info *this_frame,
3268 void **this_prologue_cache)
3269 {
3270 CORE_ADDR addr_in_block;
3271 gdb_byte dummy[4];
3272 CORE_ADDR pc, start_addr;
3273 const char *name;
3274
3275 addr_in_block = get_frame_address_in_block (this_frame);
3276 pc = get_frame_pc (this_frame);
3277 if (in_plt_section (addr_in_block)
3278 /* We also use the stub winder if the target memory is unreadable
3279 to avoid having the prologue unwinder trying to read it. */
3280 || target_read_memory (pc, dummy, 4) != 0)
3281 return 1;
3282
3283 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3284 && arm_skip_bx_reg (this_frame, pc) != 0)
3285 return 1;
3286
3287 return 0;
3288 }
3289
3290 struct frame_unwind arm_stub_unwind = {
3291 "arm stub",
3292 NORMAL_FRAME,
3293 default_frame_unwind_stop_reason,
3294 arm_stub_this_id,
3295 arm_prologue_prev_register,
3296 NULL,
3297 arm_stub_unwind_sniffer
3298 };
3299
3300 /* Put here the code to store, into CACHE->saved_regs, the addresses
3301 of the saved registers of frame described by THIS_FRAME. CACHE is
3302 returned. */
3303
3304 static struct arm_prologue_cache *
3305 arm_m_exception_cache (struct frame_info *this_frame)
3306 {
3307 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3308 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3309 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3310 struct arm_prologue_cache *cache;
3311 CORE_ADDR lr;
3312 CORE_ADDR sp;
3313 CORE_ADDR unwound_sp;
3314 uint32_t sp_r0_offset = 0;
3315 LONGEST xpsr;
3316 uint32_t exc_return;
3317 bool fnc_return;
3318 uint32_t extended_frame_used;
3319 bool secure_stack_used = false;
3320 bool default_callee_register_stacking = false;
3321 bool exception_domain_is_secure = false;
3322
3323 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3324 arm_cache_init (cache, this_frame);
3325
3326 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3327 describes which bits in LR that define which stack was used prior
3328 to the exception and if FPU is used (causing extended stack frame). */
3329
3330 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3331 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3332
3333 fnc_return = ((lr & 0xfffffffe) == 0xfefffffe);
3334 if (tdep->have_sec_ext && fnc_return)
3335 {
3336 int actual_sp;
3337
3338 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_ns_regnum);
3339 arm_cache_set_active_sp_value (cache, tdep, sp);
3340 if (lr & 1)
3341 actual_sp = tdep->m_profile_msp_s_regnum;
3342 else
3343 actual_sp = tdep->m_profile_msp_ns_regnum;
3344
3345 arm_cache_switch_prev_sp (cache, tdep, actual_sp);
3346 sp = get_frame_register_unsigned (this_frame, actual_sp);
3347
3348 cache->saved_regs[ARM_LR_REGNUM].set_addr (sp);
3349
3350 arm_cache_set_active_sp_value (cache, tdep, sp + 8);
3351
3352 return cache;
3353 }
3354
3355 /* Check EXC_RETURN indicator bits (24-31). */
3356 exc_return = (((lr >> 24) & 0xff) == 0xff);
3357 if (exc_return)
3358 {
3359 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3360 bool process_stack_used = ((lr & (1 << 2)) != 0);
3361
3362 if (tdep->have_sec_ext)
3363 {
3364 secure_stack_used = ((lr & (1 << 6)) != 0);
3365 default_callee_register_stacking = ((lr & (1 << 5)) != 0);
3366 exception_domain_is_secure = ((lr & (1 << 0)) == 0);
3367
3368 /* Unwinding from non-secure to secure can trip security
3369 measures. In order to avoid the debugger being
3370 intrusive, rely on the user to configure the requested
3371 mode. */
3372 if (secure_stack_used && !exception_domain_is_secure
3373 && !arm_unwind_secure_frames)
3374 {
3375 warning (_("Non-secure to secure stack unwinding disabled."));
3376
3377 /* Terminate any further stack unwinding by referring to self. */
3378 arm_cache_set_active_sp_value (cache, tdep, sp);
3379 return cache;
3380 }
3381
3382 if (process_stack_used)
3383 {
3384 if (secure_stack_used)
3385 /* Secure thread (process) stack used, use PSP_S as SP. */
3386 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3387 else
3388 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3389 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_ns_regnum);
3390 }
3391 else
3392 {
3393 if (secure_stack_used)
3394 /* Secure main stack used, use MSP_S as SP. */
3395 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3396 else
3397 /* Non-secure main stack used, use MSP_NS as SP. */
3398 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_ns_regnum);
3399 }
3400 }
3401 else
3402 {
3403 if (process_stack_used)
3404 /* Thread (process) stack used, use PSP as SP. */
3405 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_regnum);
3406 else
3407 /* Main stack used, use MSP as SP. */
3408 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_regnum);
3409 }
3410 }
3411 else
3412 {
3413 /* Main stack used, use MSP as SP. */
3414 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_regnum);
3415 }
3416
3417 /* Fetch the SP to use for this frame. */
3418 unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3419
3420 /* With the Security extension, the hardware saves R4..R11 too. */
3421 if (exc_return && tdep->have_sec_ext && secure_stack_used
3422 && (!default_callee_register_stacking || exception_domain_is_secure))
3423 {
3424 /* Read R4..R11 from the integer callee registers. */
3425 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3426 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3427 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3428 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3429 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3430 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3431 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3432 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3433 sp_r0_offset = 0x28;
3434 }
3435
3436 /* The hardware saves eight 32-bit words, comprising xPSR,
3437 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3438 "B1.5.6 Exception entry behavior" in
3439 "ARMv7-M Architecture Reference Manual". */
3440 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3441 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 4);
3442 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 8);
3443 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 12);
3444 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset + 16);
3445 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset + 20);
3446 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset + 24);
3447 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset + 28);
3448
3449 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3450 type used. */
3451 extended_frame_used = ((lr & (1 << 4)) == 0);
3452 if (exc_return && extended_frame_used)
3453 {
3454 int i;
3455 int fpu_regs_stack_offset;
3456
3457 /* This code does not take into account the lazy stacking, see "Lazy
3458 context save of FP state", in B1.5.7, also ARM AN298, supported
3459 by Cortex-M4F architecture.
3460 To fully handle this the FPCCR register (Floating-point Context
3461 Control Register) needs to be read out and the bits ASPEN and LSPEN
3462 could be checked to setup correct lazy stacked FP registers.
3463 This register is located at address 0xE000EF34. */
3464
3465 /* Extended stack frame type used. */
3466 fpu_regs_stack_offset = unwound_sp + sp_r0_offset + 0x20;
3467 for (i = 0; i < 16; i++)
3468 {
3469 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3470 fpu_regs_stack_offset += 4;
3471 }
3472 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x60);
3473 fpu_regs_stack_offset += 4;
3474
3475 if (tdep->have_sec_ext && !default_callee_register_stacking)
3476 {
3477 /* Handle floating-point callee saved registers. */
3478 fpu_regs_stack_offset = 0x90;
3479 for (i = 16; i < 32; i++)
3480 {
3481 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3482 fpu_regs_stack_offset += 4;
3483 }
3484
3485 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + sp_r0_offset + 0xD0);
3486 }
3487 else
3488 {
3489 /* Offset 0x64 is reserved. */
3490 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + sp_r0_offset + 0x68);
3491 }
3492 }
3493 else
3494 {
3495 /* Standard stack frame type used. */
3496 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + sp_r0_offset + 0x20);
3497 }
3498
3499 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3500 aligner between the top of the 32-byte stack frame and the
3501 previous context's stack pointer. */
3502 if (safe_read_memory_integer (unwound_sp + sp_r0_offset + 28, 4, byte_order, &xpsr)
3503 && (xpsr & (1 << 9)) != 0)
3504 arm_cache_set_active_sp_value (cache, tdep,
3505 arm_cache_get_prev_sp_value (cache, tdep) + 4);
3506
3507 return cache;
3508 }
3509
3510 /* Implementation of function hook 'this_id' in
3511 'struct frame_uwnind'. */
3512
3513 static void
3514 arm_m_exception_this_id (struct frame_info *this_frame,
3515 void **this_cache,
3516 struct frame_id *this_id)
3517 {
3518 struct arm_prologue_cache *cache;
3519
3520 if (*this_cache == NULL)
3521 *this_cache = arm_m_exception_cache (this_frame);
3522 cache = (struct arm_prologue_cache *) *this_cache;
3523
3524 /* Our frame ID for a stub frame is the current SP and LR. */
3525 arm_gdbarch_tdep *tdep
3526 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3527 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3528 get_frame_pc (this_frame));
3529 }
3530
3531 /* Implementation of function hook 'prev_register' in
3532 'struct frame_uwnind'. */
3533
3534 static struct value *
3535 arm_m_exception_prev_register (struct frame_info *this_frame,
3536 void **this_cache,
3537 int prev_regnum)
3538 {
3539 struct arm_prologue_cache *cache;
3540 CORE_ADDR sp_value;
3541
3542 if (*this_cache == NULL)
3543 *this_cache = arm_m_exception_cache (this_frame);
3544 cache = (struct arm_prologue_cache *) *this_cache;
3545
3546 /* The value was already reconstructed into PREV_SP. */
3547 arm_gdbarch_tdep *tdep
3548 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3549 if (prev_regnum == ARM_SP_REGNUM)
3550 return frame_unwind_got_constant (this_frame, prev_regnum,
3551 arm_cache_get_prev_sp_value (cache, tdep));
3552
3553 /* The value might be one of the alternative SP, if so, use the
3554 value already constructed. */
3555 if (arm_cache_is_sp_register (cache, tdep, prev_regnum))
3556 {
3557 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3558 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3559 }
3560
3561 if (prev_regnum == ARM_PC_REGNUM)
3562 {
3563 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3564 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3565
3566 return frame_unwind_got_constant (this_frame, prev_regnum,
3567 arm_addr_bits_remove (gdbarch, lr));
3568 }
3569
3570 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3571 prev_regnum);
3572 }
3573
3574 /* Implementation of function hook 'sniffer' in
3575 'struct frame_uwnind'. */
3576
3577 static int
3578 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3579 struct frame_info *this_frame,
3580 void **this_prologue_cache)
3581 {
3582 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3583 CORE_ADDR this_pc = get_frame_pc (this_frame);
3584
3585 /* No need to check is_m; this sniffer is only registered for
3586 M-profile architectures. */
3587
3588 /* Check if exception frame returns to a magic PC value. */
3589 return arm_m_addr_is_magic (gdbarch, this_pc);
3590 }
3591
3592 /* Frame unwinder for M-profile exceptions. */
3593
3594 struct frame_unwind arm_m_exception_unwind =
3595 {
3596 "arm m exception",
3597 SIGTRAMP_FRAME,
3598 default_frame_unwind_stop_reason,
3599 arm_m_exception_this_id,
3600 arm_m_exception_prev_register,
3601 NULL,
3602 arm_m_exception_unwind_sniffer
3603 };
3604
3605 static CORE_ADDR
3606 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3607 {
3608 struct arm_prologue_cache *cache;
3609
3610 if (*this_cache == NULL)
3611 *this_cache = arm_make_prologue_cache (this_frame);
3612 cache = (struct arm_prologue_cache *) *this_cache;
3613
3614 arm_gdbarch_tdep *tdep
3615 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3616 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3617 }
3618
3619 struct frame_base arm_normal_base = {
3620 &arm_prologue_unwind,
3621 arm_normal_frame_base,
3622 arm_normal_frame_base,
3623 arm_normal_frame_base
3624 };
3625
3626 static struct value *
3627 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3628 int regnum)
3629 {
3630 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3631 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3632 CORE_ADDR lr, cpsr;
3633 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3634
3635 switch (regnum)
3636 {
3637 case ARM_PC_REGNUM:
3638 /* The PC is normally copied from the return column, which
3639 describes saves of LR. However, that version may have an
3640 extra bit set to indicate Thumb state. The bit is not
3641 part of the PC. */
3642
3643 /* Record in the frame whether the return address was signed. */
3644 if (tdep->have_pacbti)
3645 {
3646 CORE_ADDR ra_auth_code
3647 = frame_unwind_register_unsigned (this_frame,
3648 tdep->pacbti_pseudo_base);
3649
3650 if (ra_auth_code != 0)
3651 set_frame_previous_pc_masked (this_frame);
3652 }
3653
3654 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3655 return frame_unwind_got_constant (this_frame, regnum,
3656 arm_addr_bits_remove (gdbarch, lr));
3657
3658 case ARM_PS_REGNUM:
3659 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3660 cpsr = get_frame_register_unsigned (this_frame, regnum);
3661 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3662 if (IS_THUMB_ADDR (lr))
3663 cpsr |= t_bit;
3664 else
3665 cpsr &= ~t_bit;
3666 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3667
3668 default:
3669 internal_error (__FILE__, __LINE__,
3670 _("Unexpected register %d"), regnum);
3671 }
3672 }
3673
3674 /* Implement the stack_frame_destroyed_p gdbarch method. */
3675
3676 static int
3677 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3678 {
3679 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3680 unsigned int insn, insn2;
3681 int found_return = 0, found_stack_adjust = 0;
3682 CORE_ADDR func_start, func_end;
3683 CORE_ADDR scan_pc;
3684 gdb_byte buf[4];
3685
3686 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3687 return 0;
3688
3689 /* The epilogue is a sequence of instructions along the following lines:
3690
3691 - add stack frame size to SP or FP
3692 - [if frame pointer used] restore SP from FP
3693 - restore registers from SP [may include PC]
3694 - a return-type instruction [if PC wasn't already restored]
3695
3696 In a first pass, we scan forward from the current PC and verify the
3697 instructions we find as compatible with this sequence, ending in a
3698 return instruction.
3699
3700 However, this is not sufficient to distinguish indirect function calls
3701 within a function from indirect tail calls in the epilogue in some cases.
3702 Therefore, if we didn't already find any SP-changing instruction during
3703 forward scan, we add a backward scanning heuristic to ensure we actually
3704 are in the epilogue. */
3705
3706 scan_pc = pc;
3707 while (scan_pc < func_end && !found_return)
3708 {
3709 if (target_read_memory (scan_pc, buf, 2))
3710 break;
3711
3712 scan_pc += 2;
3713 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3714
3715 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3716 found_return = 1;
3717 else if (insn == 0x46f7) /* mov pc, lr */
3718 found_return = 1;
3719 else if (thumb_instruction_restores_sp (insn))
3720 {
3721 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3722 found_return = 1;
3723 }
3724 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3725 {
3726 if (target_read_memory (scan_pc, buf, 2))
3727 break;
3728
3729 scan_pc += 2;
3730 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3731
3732 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3733 {
3734 if (insn2 & 0x8000) /* <registers> include PC. */
3735 found_return = 1;
3736 }
3737 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3738 && (insn2 & 0x0fff) == 0x0b04)
3739 {
3740 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3741 found_return = 1;
3742 }
3743 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3744 && (insn2 & 0x0e00) == 0x0a00)
3745 ;
3746 else
3747 break;
3748 }
3749 else
3750 break;
3751 }
3752
3753 if (!found_return)
3754 return 0;
3755
3756 /* Since any instruction in the epilogue sequence, with the possible
3757 exception of return itself, updates the stack pointer, we need to
3758 scan backwards for at most one instruction. Try either a 16-bit or
3759 a 32-bit instruction. This is just a heuristic, so we do not worry
3760 too much about false positives. */
3761
3762 if (pc - 4 < func_start)
3763 return 0;
3764 if (target_read_memory (pc - 4, buf, 4))
3765 return 0;
3766
3767 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3768 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3769
3770 if (thumb_instruction_restores_sp (insn2))
3771 found_stack_adjust = 1;
3772 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3773 found_stack_adjust = 1;
3774 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3775 && (insn2 & 0x0fff) == 0x0b04)
3776 found_stack_adjust = 1;
3777 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3778 && (insn2 & 0x0e00) == 0x0a00)
3779 found_stack_adjust = 1;
3780
3781 return found_stack_adjust;
3782 }
3783
3784 static int
3785 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3786 {
3787 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3788 unsigned int insn;
3789 int found_return;
3790 CORE_ADDR func_start, func_end;
3791
3792 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3793 return 0;
3794
3795 /* We are in the epilogue if the previous instruction was a stack
3796 adjustment and the next instruction is a possible return (bx, mov
3797 pc, or pop). We could have to scan backwards to find the stack
3798 adjustment, or forwards to find the return, but this is a decent
3799 approximation. First scan forwards. */
3800
3801 found_return = 0;
3802 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3803 if (bits (insn, 28, 31) != INST_NV)
3804 {
3805 if ((insn & 0x0ffffff0) == 0x012fff10)
3806 /* BX. */
3807 found_return = 1;
3808 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3809 /* MOV PC. */
3810 found_return = 1;
3811 else if ((insn & 0x0fff0000) == 0x08bd0000
3812 && (insn & 0x0000c000) != 0)
3813 /* POP (LDMIA), including PC or LR. */
3814 found_return = 1;
3815 }
3816
3817 if (!found_return)
3818 return 0;
3819
3820 /* Scan backwards. This is just a heuristic, so do not worry about
3821 false positives from mode changes. */
3822
3823 if (pc < func_start + 4)
3824 return 0;
3825
3826 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3827 if (arm_instruction_restores_sp (insn))
3828 return 1;
3829
3830 return 0;
3831 }
3832
3833 /* Implement the stack_frame_destroyed_p gdbarch method. */
3834
3835 static int
3836 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3837 {
3838 if (arm_pc_is_thumb (gdbarch, pc))
3839 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3840 else
3841 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3842 }
3843
3844 /* When arguments must be pushed onto the stack, they go on in reverse
3845 order. The code below implements a FILO (stack) to do this. */
3846
3847 struct stack_item
3848 {
3849 int len;
3850 struct stack_item *prev;
3851 gdb_byte *data;
3852 };
3853
3854 static struct stack_item *
3855 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3856 {
3857 struct stack_item *si;
3858 si = XNEW (struct stack_item);
3859 si->data = (gdb_byte *) xmalloc (len);
3860 si->len = len;
3861 si->prev = prev;
3862 memcpy (si->data, contents, len);
3863 return si;
3864 }
3865
3866 static struct stack_item *
3867 pop_stack_item (struct stack_item *si)
3868 {
3869 struct stack_item *dead = si;
3870 si = si->prev;
3871 xfree (dead->data);
3872 xfree (dead);
3873 return si;
3874 }
3875
3876 /* Implement the gdbarch type alignment method, overrides the generic
3877 alignment algorithm for anything that is arm specific. */
3878
3879 static ULONGEST
3880 arm_type_align (gdbarch *gdbarch, struct type *t)
3881 {
3882 t = check_typedef (t);
3883 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3884 {
3885 /* Use the natural alignment for vector types (the same for
3886 scalar type), but the maximum alignment is 64-bit. */
3887 if (TYPE_LENGTH (t) > 8)
3888 return 8;
3889 else
3890 return TYPE_LENGTH (t);
3891 }
3892
3893 /* Allow the common code to calculate the alignment. */
3894 return 0;
3895 }
3896
3897 /* Possible base types for a candidate for passing and returning in
3898 VFP registers. */
3899
3900 enum arm_vfp_cprc_base_type
3901 {
3902 VFP_CPRC_UNKNOWN,
3903 VFP_CPRC_SINGLE,
3904 VFP_CPRC_DOUBLE,
3905 VFP_CPRC_VEC64,
3906 VFP_CPRC_VEC128
3907 };
3908
3909 /* The length of one element of base type B. */
3910
3911 static unsigned
3912 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3913 {
3914 switch (b)
3915 {
3916 case VFP_CPRC_SINGLE:
3917 return 4;
3918 case VFP_CPRC_DOUBLE:
3919 return 8;
3920 case VFP_CPRC_VEC64:
3921 return 8;
3922 case VFP_CPRC_VEC128:
3923 return 16;
3924 default:
3925 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3926 (int) b);
3927 }
3928 }
3929
3930 /* The character ('s', 'd' or 'q') for the type of VFP register used
3931 for passing base type B. */
3932
3933 static int
3934 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3935 {
3936 switch (b)
3937 {
3938 case VFP_CPRC_SINGLE:
3939 return 's';
3940 case VFP_CPRC_DOUBLE:
3941 return 'd';
3942 case VFP_CPRC_VEC64:
3943 return 'd';
3944 case VFP_CPRC_VEC128:
3945 return 'q';
3946 default:
3947 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3948 (int) b);
3949 }
3950 }
3951
3952 /* Determine whether T may be part of a candidate for passing and
3953 returning in VFP registers, ignoring the limit on the total number
3954 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3955 classification of the first valid component found; if it is not
3956 VFP_CPRC_UNKNOWN, all components must have the same classification
3957 as *BASE_TYPE. If it is found that T contains a type not permitted
3958 for passing and returning in VFP registers, a type differently
3959 classified from *BASE_TYPE, or two types differently classified
3960 from each other, return -1, otherwise return the total number of
3961 base-type elements found (possibly 0 in an empty structure or
3962 array). Vector types are not currently supported, matching the
3963 generic AAPCS support. */
3964
3965 static int
3966 arm_vfp_cprc_sub_candidate (struct type *t,
3967 enum arm_vfp_cprc_base_type *base_type)
3968 {
3969 t = check_typedef (t);
3970 switch (t->code ())
3971 {
3972 case TYPE_CODE_FLT:
3973 switch (TYPE_LENGTH (t))
3974 {
3975 case 4:
3976 if (*base_type == VFP_CPRC_UNKNOWN)
3977 *base_type = VFP_CPRC_SINGLE;
3978 else if (*base_type != VFP_CPRC_SINGLE)
3979 return -1;
3980 return 1;
3981
3982 case 8:
3983 if (*base_type == VFP_CPRC_UNKNOWN)
3984 *base_type = VFP_CPRC_DOUBLE;
3985 else if (*base_type != VFP_CPRC_DOUBLE)
3986 return -1;
3987 return 1;
3988
3989 default:
3990 return -1;
3991 }
3992 break;
3993
3994 case TYPE_CODE_COMPLEX:
3995 /* Arguments of complex T where T is one of the types float or
3996 double get treated as if they are implemented as:
3997
3998 struct complexT
3999 {
4000 T real;
4001 T imag;
4002 };
4003
4004 */
4005 switch (TYPE_LENGTH (t))
4006 {
4007 case 8:
4008 if (*base_type == VFP_CPRC_UNKNOWN)
4009 *base_type = VFP_CPRC_SINGLE;
4010 else if (*base_type != VFP_CPRC_SINGLE)
4011 return -1;
4012 return 2;
4013
4014 case 16:
4015 if (*base_type == VFP_CPRC_UNKNOWN)
4016 *base_type = VFP_CPRC_DOUBLE;
4017 else if (*base_type != VFP_CPRC_DOUBLE)
4018 return -1;
4019 return 2;
4020
4021 default:
4022 return -1;
4023 }
4024 break;
4025
4026 case TYPE_CODE_ARRAY:
4027 {
4028 if (t->is_vector ())
4029 {
4030 /* A 64-bit or 128-bit containerized vector type are VFP
4031 CPRCs. */
4032 switch (TYPE_LENGTH (t))
4033 {
4034 case 8:
4035 if (*base_type == VFP_CPRC_UNKNOWN)
4036 *base_type = VFP_CPRC_VEC64;
4037 return 1;
4038 case 16:
4039 if (*base_type == VFP_CPRC_UNKNOWN)
4040 *base_type = VFP_CPRC_VEC128;
4041 return 1;
4042 default:
4043 return -1;
4044 }
4045 }
4046 else
4047 {
4048 int count;
4049 unsigned unitlen;
4050
4051 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
4052 base_type);
4053 if (count == -1)
4054 return -1;
4055 if (TYPE_LENGTH (t) == 0)
4056 {
4057 gdb_assert (count == 0);
4058 return 0;
4059 }
4060 else if (count == 0)
4061 return -1;
4062 unitlen = arm_vfp_cprc_unit_length (*base_type);
4063 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
4064 return TYPE_LENGTH (t) / unitlen;
4065 }
4066 }
4067 break;
4068
4069 case TYPE_CODE_STRUCT:
4070 {
4071 int count = 0;
4072 unsigned unitlen;
4073 int i;
4074 for (i = 0; i < t->num_fields (); i++)
4075 {
4076 int sub_count = 0;
4077
4078 if (!field_is_static (&t->field (i)))
4079 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4080 base_type);
4081 if (sub_count == -1)
4082 return -1;
4083 count += sub_count;
4084 }
4085 if (TYPE_LENGTH (t) == 0)
4086 {
4087 gdb_assert (count == 0);
4088 return 0;
4089 }
4090 else if (count == 0)
4091 return -1;
4092 unitlen = arm_vfp_cprc_unit_length (*base_type);
4093 if (TYPE_LENGTH (t) != unitlen * count)
4094 return -1;
4095 return count;
4096 }
4097
4098 case TYPE_CODE_UNION:
4099 {
4100 int count = 0;
4101 unsigned unitlen;
4102 int i;
4103 for (i = 0; i < t->num_fields (); i++)
4104 {
4105 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4106 base_type);
4107 if (sub_count == -1)
4108 return -1;
4109 count = (count > sub_count ? count : sub_count);
4110 }
4111 if (TYPE_LENGTH (t) == 0)
4112 {
4113 gdb_assert (count == 0);
4114 return 0;
4115 }
4116 else if (count == 0)
4117 return -1;
4118 unitlen = arm_vfp_cprc_unit_length (*base_type);
4119 if (TYPE_LENGTH (t) != unitlen * count)
4120 return -1;
4121 return count;
4122 }
4123
4124 default:
4125 break;
4126 }
4127
4128 return -1;
4129 }
4130
4131 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4132 if passed to or returned from a non-variadic function with the VFP
4133 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4134 *BASE_TYPE to the base type for T and *COUNT to the number of
4135 elements of that base type before returning. */
4136
4137 static int
4138 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4139 int *count)
4140 {
4141 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4142 int c = arm_vfp_cprc_sub_candidate (t, &b);
4143 if (c <= 0 || c > 4)
4144 return 0;
4145 *base_type = b;
4146 *count = c;
4147 return 1;
4148 }
4149
4150 /* Return 1 if the VFP ABI should be used for passing arguments to and
4151 returning values from a function of type FUNC_TYPE, 0
4152 otherwise. */
4153
4154 static int
4155 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4156 {
4157 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4158
4159 /* Variadic functions always use the base ABI. Assume that functions
4160 without debug info are not variadic. */
4161 if (func_type && check_typedef (func_type)->has_varargs ())
4162 return 0;
4163
4164 /* The VFP ABI is only supported as a variant of AAPCS. */
4165 if (tdep->arm_abi != ARM_ABI_AAPCS)
4166 return 0;
4167
4168 return tdep->fp_model == ARM_FLOAT_VFP;
4169 }
4170
4171 /* We currently only support passing parameters in integer registers, which
4172 conforms with GCC's default model, and VFP argument passing following
4173 the VFP variant of AAPCS. Several other variants exist and
4174 we should probably support some of them based on the selected ABI. */
4175
4176 static CORE_ADDR
4177 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4178 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4179 struct value **args, CORE_ADDR sp,
4180 function_call_return_method return_method,
4181 CORE_ADDR struct_addr)
4182 {
4183 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4184 int argnum;
4185 int argreg;
4186 int nstack;
4187 struct stack_item *si = NULL;
4188 int use_vfp_abi;
4189 struct type *ftype;
4190 unsigned vfp_regs_free = (1 << 16) - 1;
4191 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4192
4193 /* Determine the type of this function and whether the VFP ABI
4194 applies. */
4195 ftype = check_typedef (value_type (function));
4196 if (ftype->code () == TYPE_CODE_PTR)
4197 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
4198 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4199
4200 /* Set the return address. For the ARM, the return breakpoint is
4201 always at BP_ADDR. */
4202 if (arm_pc_is_thumb (gdbarch, bp_addr))
4203 bp_addr |= 1;
4204 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4205
4206 /* Walk through the list of args and determine how large a temporary
4207 stack is required. Need to take care here as structs may be
4208 passed on the stack, and we have to push them. */
4209 nstack = 0;
4210
4211 argreg = ARM_A1_REGNUM;
4212 nstack = 0;
4213
4214 /* The struct_return pointer occupies the first parameter
4215 passing register. */
4216 if (return_method == return_method_struct)
4217 {
4218 arm_debug_printf ("struct return in %s = %s",
4219 gdbarch_register_name (gdbarch, argreg),
4220 paddress (gdbarch, struct_addr));
4221
4222 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4223 argreg++;
4224 }
4225
4226 for (argnum = 0; argnum < nargs; argnum++)
4227 {
4228 int len;
4229 struct type *arg_type;
4230 struct type *target_type;
4231 enum type_code typecode;
4232 const bfd_byte *val;
4233 int align;
4234 enum arm_vfp_cprc_base_type vfp_base_type;
4235 int vfp_base_count;
4236 int may_use_core_reg = 1;
4237
4238 arg_type = check_typedef (value_type (args[argnum]));
4239 len = TYPE_LENGTH (arg_type);
4240 target_type = TYPE_TARGET_TYPE (arg_type);
4241 typecode = arg_type->code ();
4242 val = value_contents (args[argnum]).data ();
4243
4244 align = type_align (arg_type);
4245 /* Round alignment up to a whole number of words. */
4246 align = (align + ARM_INT_REGISTER_SIZE - 1)
4247 & ~(ARM_INT_REGISTER_SIZE - 1);
4248 /* Different ABIs have different maximum alignments. */
4249 if (tdep->arm_abi == ARM_ABI_APCS)
4250 {
4251 /* The APCS ABI only requires word alignment. */
4252 align = ARM_INT_REGISTER_SIZE;
4253 }
4254 else
4255 {
4256 /* The AAPCS requires at most doubleword alignment. */
4257 if (align > ARM_INT_REGISTER_SIZE * 2)
4258 align = ARM_INT_REGISTER_SIZE * 2;
4259 }
4260
4261 if (use_vfp_abi
4262 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4263 &vfp_base_count))
4264 {
4265 int regno;
4266 int unit_length;
4267 int shift;
4268 unsigned mask;
4269
4270 /* Because this is a CPRC it cannot go in a core register or
4271 cause a core register to be skipped for alignment.
4272 Either it goes in VFP registers and the rest of this loop
4273 iteration is skipped for this argument, or it goes on the
4274 stack (and the stack alignment code is correct for this
4275 case). */
4276 may_use_core_reg = 0;
4277
4278 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4279 shift = unit_length / 4;
4280 mask = (1 << (shift * vfp_base_count)) - 1;
4281 for (regno = 0; regno < 16; regno += shift)
4282 if (((vfp_regs_free >> regno) & mask) == mask)
4283 break;
4284
4285 if (regno < 16)
4286 {
4287 int reg_char;
4288 int reg_scaled;
4289 int i;
4290
4291 vfp_regs_free &= ~(mask << regno);
4292 reg_scaled = regno / shift;
4293 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4294 for (i = 0; i < vfp_base_count; i++)
4295 {
4296 char name_buf[4];
4297 int regnum;
4298 if (reg_char == 'q')
4299 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4300 val + i * unit_length);
4301 else
4302 {
4303 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4304 reg_char, reg_scaled + i);
4305 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4306 strlen (name_buf));
4307 regcache->cooked_write (regnum, val + i * unit_length);
4308 }
4309 }
4310 continue;
4311 }
4312 else
4313 {
4314 /* This CPRC could not go in VFP registers, so all VFP
4315 registers are now marked as used. */
4316 vfp_regs_free = 0;
4317 }
4318 }
4319
4320 /* Push stack padding for doubleword alignment. */
4321 if (nstack & (align - 1))
4322 {
4323 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4324 nstack += ARM_INT_REGISTER_SIZE;
4325 }
4326
4327 /* Doubleword aligned quantities must go in even register pairs. */
4328 if (may_use_core_reg
4329 && argreg <= ARM_LAST_ARG_REGNUM
4330 && align > ARM_INT_REGISTER_SIZE
4331 && argreg & 1)
4332 argreg++;
4333
4334 /* If the argument is a pointer to a function, and it is a
4335 Thumb function, create a LOCAL copy of the value and set
4336 the THUMB bit in it. */
4337 if (TYPE_CODE_PTR == typecode
4338 && target_type != NULL
4339 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4340 {
4341 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4342 if (arm_pc_is_thumb (gdbarch, regval))
4343 {
4344 bfd_byte *copy = (bfd_byte *) alloca (len);
4345 store_unsigned_integer (copy, len, byte_order,
4346 MAKE_THUMB_ADDR (regval));
4347 val = copy;
4348 }
4349 }
4350
4351 /* Copy the argument to general registers or the stack in
4352 register-sized pieces. Large arguments are split between
4353 registers and stack. */
4354 while (len > 0)
4355 {
4356 int partial_len = len < ARM_INT_REGISTER_SIZE
4357 ? len : ARM_INT_REGISTER_SIZE;
4358 CORE_ADDR regval
4359 = extract_unsigned_integer (val, partial_len, byte_order);
4360
4361 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4362 {
4363 /* The argument is being passed in a general purpose
4364 register. */
4365 if (byte_order == BFD_ENDIAN_BIG)
4366 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4367
4368 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4369 gdbarch_register_name (gdbarch, argreg),
4370 phex (regval, ARM_INT_REGISTER_SIZE));
4371
4372 regcache_cooked_write_unsigned (regcache, argreg, regval);
4373 argreg++;
4374 }
4375 else
4376 {
4377 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4378
4379 memset (buf, 0, sizeof (buf));
4380 store_unsigned_integer (buf, partial_len, byte_order, regval);
4381
4382 /* Push the arguments onto the stack. */
4383 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4384 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4385 nstack += ARM_INT_REGISTER_SIZE;
4386 }
4387
4388 len -= partial_len;
4389 val += partial_len;
4390 }
4391 }
4392 /* If we have an odd number of words to push, then decrement the stack
4393 by one word now, so first stack argument will be dword aligned. */
4394 if (nstack & 4)
4395 sp -= 4;
4396
4397 while (si)
4398 {
4399 sp -= si->len;
4400 write_memory (sp, si->data, si->len);
4401 si = pop_stack_item (si);
4402 }
4403
4404 /* Finally, update teh SP register. */
4405 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4406
4407 return sp;
4408 }
4409
4410
4411 /* Always align the frame to an 8-byte boundary. This is required on
4412 some platforms and harmless on the rest. */
4413
4414 static CORE_ADDR
4415 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4416 {
4417 /* Align the stack to eight bytes. */
4418 return sp & ~ (CORE_ADDR) 7;
4419 }
4420
4421 static void
4422 print_fpu_flags (struct ui_file *file, int flags)
4423 {
4424 if (flags & (1 << 0))
4425 gdb_puts ("IVO ", file);
4426 if (flags & (1 << 1))
4427 gdb_puts ("DVZ ", file);
4428 if (flags & (1 << 2))
4429 gdb_puts ("OFL ", file);
4430 if (flags & (1 << 3))
4431 gdb_puts ("UFL ", file);
4432 if (flags & (1 << 4))
4433 gdb_puts ("INX ", file);
4434 gdb_putc ('\n', file);
4435 }
4436
4437 /* Print interesting information about the floating point processor
4438 (if present) or emulator. */
4439 static void
4440 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4441 struct frame_info *frame, const char *args)
4442 {
4443 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4444 int type;
4445
4446 type = (status >> 24) & 127;
4447 if (status & (1 << 31))
4448 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4449 else
4450 gdb_printf (file, _("Software FPU type %d\n"), type);
4451 /* i18n: [floating point unit] mask */
4452 gdb_puts (_("mask: "), file);
4453 print_fpu_flags (file, status >> 16);
4454 /* i18n: [floating point unit] flags */
4455 gdb_puts (_("flags: "), file);
4456 print_fpu_flags (file, status);
4457 }
4458
4459 /* Construct the ARM extended floating point type. */
4460 static struct type *
4461 arm_ext_type (struct gdbarch *gdbarch)
4462 {
4463 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4464
4465 if (!tdep->arm_ext_type)
4466 tdep->arm_ext_type
4467 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4468 floatformats_arm_ext);
4469
4470 return tdep->arm_ext_type;
4471 }
4472
4473 static struct type *
4474 arm_neon_double_type (struct gdbarch *gdbarch)
4475 {
4476 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4477
4478 if (tdep->neon_double_type == NULL)
4479 {
4480 struct type *t, *elem;
4481
4482 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4483 TYPE_CODE_UNION);
4484 elem = builtin_type (gdbarch)->builtin_uint8;
4485 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4486 elem = builtin_type (gdbarch)->builtin_uint16;
4487 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4488 elem = builtin_type (gdbarch)->builtin_uint32;
4489 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4490 elem = builtin_type (gdbarch)->builtin_uint64;
4491 append_composite_type_field (t, "u64", elem);
4492 elem = builtin_type (gdbarch)->builtin_float;
4493 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4494 elem = builtin_type (gdbarch)->builtin_double;
4495 append_composite_type_field (t, "f64", elem);
4496
4497 t->set_is_vector (true);
4498 t->set_name ("neon_d");
4499 tdep->neon_double_type = t;
4500 }
4501
4502 return tdep->neon_double_type;
4503 }
4504
4505 /* FIXME: The vector types are not correctly ordered on big-endian
4506 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4507 bits of d0 - regardless of what unit size is being held in d0. So
4508 the offset of the first uint8 in d0 is 7, but the offset of the
4509 first float is 4. This code works as-is for little-endian
4510 targets. */
4511
4512 static struct type *
4513 arm_neon_quad_type (struct gdbarch *gdbarch)
4514 {
4515 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4516
4517 if (tdep->neon_quad_type == NULL)
4518 {
4519 struct type *t, *elem;
4520
4521 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4522 TYPE_CODE_UNION);
4523 elem = builtin_type (gdbarch)->builtin_uint8;
4524 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4525 elem = builtin_type (gdbarch)->builtin_uint16;
4526 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4527 elem = builtin_type (gdbarch)->builtin_uint32;
4528 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4529 elem = builtin_type (gdbarch)->builtin_uint64;
4530 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4531 elem = builtin_type (gdbarch)->builtin_float;
4532 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4533 elem = builtin_type (gdbarch)->builtin_double;
4534 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4535
4536 t->set_is_vector (true);
4537 t->set_name ("neon_q");
4538 tdep->neon_quad_type = t;
4539 }
4540
4541 return tdep->neon_quad_type;
4542 }
4543
4544 /* Return true if REGNUM is a Q pseudo register. Return false
4545 otherwise.
4546
4547 REGNUM is the raw register number and not a pseudo-relative register
4548 number. */
4549
4550 static bool
4551 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4552 {
4553 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4554
4555 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4556 MVE (Q0~Q7) features. */
4557 if (tdep->have_q_pseudos
4558 && regnum >= tdep->q_pseudo_base
4559 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4560 return true;
4561
4562 return false;
4563 }
4564
4565 /* Return true if REGNUM is a VFP S pseudo register. Return false
4566 otherwise.
4567
4568 REGNUM is the raw register number and not a pseudo-relative register
4569 number. */
4570
4571 static bool
4572 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4573 {
4574 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4575
4576 if (tdep->have_s_pseudos
4577 && regnum >= tdep->s_pseudo_base
4578 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4579 return true;
4580
4581 return false;
4582 }
4583
4584 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4585 otherwise.
4586
4587 REGNUM is the raw register number and not a pseudo-relative register
4588 number. */
4589
4590 static bool
4591 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
4592 {
4593 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4594
4595 if (tdep->have_mve
4596 && regnum >= tdep->mve_pseudo_base
4597 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
4598 return true;
4599
4600 return false;
4601 }
4602
4603 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
4604 false otherwise.
4605
4606 REGNUM is the raw register number and not a pseudo-relative register
4607 number. */
4608
4609 static bool
4610 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
4611 {
4612 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4613
4614 if (tdep->have_pacbti
4615 && regnum >= tdep->pacbti_pseudo_base
4616 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
4617 return true;
4618
4619 return false;
4620 }
4621
4622 /* Return the GDB type object for the "standard" data type of data in
4623 register N. */
4624
4625 static struct type *
4626 arm_register_type (struct gdbarch *gdbarch, int regnum)
4627 {
4628 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4629
4630 if (is_s_pseudo (gdbarch, regnum))
4631 return builtin_type (gdbarch)->builtin_float;
4632
4633 if (is_q_pseudo (gdbarch, regnum))
4634 return arm_neon_quad_type (gdbarch);
4635
4636 if (is_mve_pseudo (gdbarch, regnum))
4637 return builtin_type (gdbarch)->builtin_int16;
4638
4639 if (is_pacbti_pseudo (gdbarch, regnum))
4640 return builtin_type (gdbarch)->builtin_uint32;
4641
4642 /* If the target description has register information, we are only
4643 in this function so that we can override the types of
4644 double-precision registers for NEON. */
4645 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4646 {
4647 struct type *t = tdesc_register_type (gdbarch, regnum);
4648
4649 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4650 && t->code () == TYPE_CODE_FLT
4651 && tdep->have_neon)
4652 return arm_neon_double_type (gdbarch);
4653 else
4654 return t;
4655 }
4656
4657 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4658 {
4659 if (!tdep->have_fpa_registers)
4660 return builtin_type (gdbarch)->builtin_void;
4661
4662 return arm_ext_type (gdbarch);
4663 }
4664 else if (regnum == ARM_SP_REGNUM)
4665 return builtin_type (gdbarch)->builtin_data_ptr;
4666 else if (regnum == ARM_PC_REGNUM)
4667 return builtin_type (gdbarch)->builtin_func_ptr;
4668 else if (regnum >= ARRAY_SIZE (arm_register_names))
4669 /* These registers are only supported on targets which supply
4670 an XML description. */
4671 return builtin_type (gdbarch)->builtin_int0;
4672 else
4673 return builtin_type (gdbarch)->builtin_uint32;
4674 }
4675
4676 /* Map a DWARF register REGNUM onto the appropriate GDB register
4677 number. */
4678
4679 static int
4680 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4681 {
4682 /* Core integer regs. */
4683 if (reg >= 0 && reg <= 15)
4684 return reg;
4685
4686 /* Legacy FPA encoding. These were once used in a way which
4687 overlapped with VFP register numbering, so their use is
4688 discouraged, but GDB doesn't support the ARM toolchain
4689 which used them for VFP. */
4690 if (reg >= 16 && reg <= 23)
4691 return ARM_F0_REGNUM + reg - 16;
4692
4693 /* New assignments for the FPA registers. */
4694 if (reg >= 96 && reg <= 103)
4695 return ARM_F0_REGNUM + reg - 96;
4696
4697 /* WMMX register assignments. */
4698 if (reg >= 104 && reg <= 111)
4699 return ARM_WCGR0_REGNUM + reg - 104;
4700
4701 if (reg >= 112 && reg <= 127)
4702 return ARM_WR0_REGNUM + reg - 112;
4703
4704 /* PACBTI register containing the Pointer Authentication Code. */
4705 if (reg == ARM_DWARF_RA_AUTH_CODE)
4706 {
4707 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4708
4709 if (tdep->have_pacbti)
4710 return tdep->pacbti_pseudo_base;
4711
4712 return -1;
4713 }
4714
4715 if (reg >= 192 && reg <= 199)
4716 return ARM_WC0_REGNUM + reg - 192;
4717
4718 /* VFP v2 registers. A double precision value is actually
4719 in d1 rather than s2, but the ABI only defines numbering
4720 for the single precision registers. This will "just work"
4721 in GDB for little endian targets (we'll read eight bytes,
4722 starting in s0 and then progressing to s1), but will be
4723 reversed on big endian targets with VFP. This won't
4724 be a problem for the new Neon quad registers; you're supposed
4725 to use DW_OP_piece for those. */
4726 if (reg >= 64 && reg <= 95)
4727 {
4728 char name_buf[4];
4729
4730 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4731 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4732 strlen (name_buf));
4733 }
4734
4735 /* VFP v3 / Neon registers. This range is also used for VFP v2
4736 registers, except that it now describes d0 instead of s0. */
4737 if (reg >= 256 && reg <= 287)
4738 {
4739 char name_buf[4];
4740
4741 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4742 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4743 strlen (name_buf));
4744 }
4745
4746 return -1;
4747 }
4748
4749 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4750 static int
4751 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4752 {
4753 int reg = regnum;
4754 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4755
4756 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4757 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4758
4759 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4760 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4761
4762 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4763 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4764
4765 if (reg < NUM_GREGS)
4766 return SIM_ARM_R0_REGNUM + reg;
4767 reg -= NUM_GREGS;
4768
4769 if (reg < NUM_FREGS)
4770 return SIM_ARM_FP0_REGNUM + reg;
4771 reg -= NUM_FREGS;
4772
4773 if (reg < NUM_SREGS)
4774 return SIM_ARM_FPS_REGNUM + reg;
4775 reg -= NUM_SREGS;
4776
4777 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4778 }
4779
4780 static const unsigned char op_lit0 = DW_OP_lit0;
4781
4782 static void
4783 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
4784 struct dwarf2_frame_state_reg *reg,
4785 struct frame_info *this_frame)
4786 {
4787 if (is_pacbti_pseudo (gdbarch, regnum))
4788 {
4789 /* Initialize RA_AUTH_CODE to zero. */
4790 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
4791 reg->loc.exp.start = &op_lit0;
4792 reg->loc.exp.len = 1;
4793 return;
4794 }
4795
4796 switch (regnum)
4797 {
4798 case ARM_PC_REGNUM:
4799 case ARM_PS_REGNUM:
4800 reg->how = DWARF2_FRAME_REG_FN;
4801 reg->loc.fn = arm_dwarf2_prev_register;
4802 break;
4803 case ARM_SP_REGNUM:
4804 reg->how = DWARF2_FRAME_REG_CFA;
4805 break;
4806 }
4807 }
4808
4809 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4810 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4811 NULL if an error occurs. BUF is freed. */
4812
4813 static gdb_byte *
4814 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4815 int old_len, int new_len)
4816 {
4817 gdb_byte *new_buf;
4818 int bytes_to_read = new_len - old_len;
4819
4820 new_buf = (gdb_byte *) xmalloc (new_len);
4821 memcpy (new_buf + bytes_to_read, buf, old_len);
4822 xfree (buf);
4823 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4824 {
4825 xfree (new_buf);
4826 return NULL;
4827 }
4828 return new_buf;
4829 }
4830
4831 /* An IT block is at most the 2-byte IT instruction followed by
4832 four 4-byte instructions. The furthest back we must search to
4833 find an IT block that affects the current instruction is thus
4834 2 + 3 * 4 == 14 bytes. */
4835 #define MAX_IT_BLOCK_PREFIX 14
4836
4837 /* Use a quick scan if there are more than this many bytes of
4838 code. */
4839 #define IT_SCAN_THRESHOLD 32
4840
4841 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4842 A breakpoint in an IT block may not be hit, depending on the
4843 condition flags. */
4844 static CORE_ADDR
4845 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4846 {
4847 gdb_byte *buf;
4848 char map_type;
4849 CORE_ADDR boundary, func_start;
4850 int buf_len;
4851 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4852 int i, any, last_it, last_it_count;
4853 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4854
4855 /* If we are using BKPT breakpoints, none of this is necessary. */
4856 if (tdep->thumb2_breakpoint == NULL)
4857 return bpaddr;
4858
4859 /* ARM mode does not have this problem. */
4860 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4861 return bpaddr;
4862
4863 /* We are setting a breakpoint in Thumb code that could potentially
4864 contain an IT block. The first step is to find how much Thumb
4865 code there is; we do not need to read outside of known Thumb
4866 sequences. */
4867 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4868 if (map_type == 0)
4869 /* Thumb-2 code must have mapping symbols to have a chance. */
4870 return bpaddr;
4871
4872 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4873
4874 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4875 && func_start > boundary)
4876 boundary = func_start;
4877
4878 /* Search for a candidate IT instruction. We have to do some fancy
4879 footwork to distinguish a real IT instruction from the second
4880 half of a 32-bit instruction, but there is no need for that if
4881 there's no candidate. */
4882 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4883 if (buf_len == 0)
4884 /* No room for an IT instruction. */
4885 return bpaddr;
4886
4887 buf = (gdb_byte *) xmalloc (buf_len);
4888 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4889 return bpaddr;
4890 any = 0;
4891 for (i = 0; i < buf_len; i += 2)
4892 {
4893 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4894 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4895 {
4896 any = 1;
4897 break;
4898 }
4899 }
4900
4901 if (any == 0)
4902 {
4903 xfree (buf);
4904 return bpaddr;
4905 }
4906
4907 /* OK, the code bytes before this instruction contain at least one
4908 halfword which resembles an IT instruction. We know that it's
4909 Thumb code, but there are still two possibilities. Either the
4910 halfword really is an IT instruction, or it is the second half of
4911 a 32-bit Thumb instruction. The only way we can tell is to
4912 scan forwards from a known instruction boundary. */
4913 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4914 {
4915 int definite;
4916
4917 /* There's a lot of code before this instruction. Start with an
4918 optimistic search; it's easy to recognize halfwords that can
4919 not be the start of a 32-bit instruction, and use that to
4920 lock on to the instruction boundaries. */
4921 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4922 if (buf == NULL)
4923 return bpaddr;
4924 buf_len = IT_SCAN_THRESHOLD;
4925
4926 definite = 0;
4927 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4928 {
4929 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4930 if (thumb_insn_size (inst1) == 2)
4931 {
4932 definite = 1;
4933 break;
4934 }
4935 }
4936
4937 /* At this point, if DEFINITE, BUF[I] is the first place we
4938 are sure that we know the instruction boundaries, and it is far
4939 enough from BPADDR that we could not miss an IT instruction
4940 affecting BPADDR. If ! DEFINITE, give up - start from a
4941 known boundary. */
4942 if (! definite)
4943 {
4944 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4945 bpaddr - boundary);
4946 if (buf == NULL)
4947 return bpaddr;
4948 buf_len = bpaddr - boundary;
4949 i = 0;
4950 }
4951 }
4952 else
4953 {
4954 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4955 if (buf == NULL)
4956 return bpaddr;
4957 buf_len = bpaddr - boundary;
4958 i = 0;
4959 }
4960
4961 /* Scan forwards. Find the last IT instruction before BPADDR. */
4962 last_it = -1;
4963 last_it_count = 0;
4964 while (i < buf_len)
4965 {
4966 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4967 last_it_count--;
4968 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4969 {
4970 last_it = i;
4971 if (inst1 & 0x0001)
4972 last_it_count = 4;
4973 else if (inst1 & 0x0002)
4974 last_it_count = 3;
4975 else if (inst1 & 0x0004)
4976 last_it_count = 2;
4977 else
4978 last_it_count = 1;
4979 }
4980 i += thumb_insn_size (inst1);
4981 }
4982
4983 xfree (buf);
4984
4985 if (last_it == -1)
4986 /* There wasn't really an IT instruction after all. */
4987 return bpaddr;
4988
4989 if (last_it_count < 1)
4990 /* It was too far away. */
4991 return bpaddr;
4992
4993 /* This really is a trouble spot. Move the breakpoint to the IT
4994 instruction. */
4995 return bpaddr - buf_len + last_it;
4996 }
4997
4998 /* ARM displaced stepping support.
4999
5000 Generally ARM displaced stepping works as follows:
5001
5002 1. When an instruction is to be single-stepped, it is first decoded by
5003 arm_process_displaced_insn. Depending on the type of instruction, it is
5004 then copied to a scratch location, possibly in a modified form. The
5005 copy_* set of functions performs such modification, as necessary. A
5006 breakpoint is placed after the modified instruction in the scratch space
5007 to return control to GDB. Note in particular that instructions which
5008 modify the PC will no longer do so after modification.
5009
5010 2. The instruction is single-stepped, by setting the PC to the scratch
5011 location address, and resuming. Control returns to GDB when the
5012 breakpoint is hit.
5013
5014 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5015 function used for the current instruction. This function's job is to
5016 put the CPU/memory state back to what it would have been if the
5017 instruction had been executed unmodified in its original location. */
5018
5019 /* NOP instruction (mov r0, r0). */
5020 #define ARM_NOP 0xe1a00000
5021 #define THUMB_NOP 0x4600
5022
5023 /* Helper for register reads for displaced stepping. In particular, this
5024 returns the PC as it would be seen by the instruction at its original
5025 location. */
5026
5027 ULONGEST
5028 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5029 int regno)
5030 {
5031 ULONGEST ret;
5032 CORE_ADDR from = dsc->insn_addr;
5033
5034 if (regno == ARM_PC_REGNUM)
5035 {
5036 /* Compute pipeline offset:
5037 - When executing an ARM instruction, PC reads as the address of the
5038 current instruction plus 8.
5039 - When executing a Thumb instruction, PC reads as the address of the
5040 current instruction plus 4. */
5041
5042 if (!dsc->is_thumb)
5043 from += 8;
5044 else
5045 from += 4;
5046
5047 displaced_debug_printf ("read pc value %.8lx",
5048 (unsigned long) from);
5049 return (ULONGEST) from;
5050 }
5051 else
5052 {
5053 regcache_cooked_read_unsigned (regs, regno, &ret);
5054
5055 displaced_debug_printf ("read r%d value %.8lx",
5056 regno, (unsigned long) ret);
5057
5058 return ret;
5059 }
5060 }
5061
5062 static int
5063 displaced_in_arm_mode (struct regcache *regs)
5064 {
5065 ULONGEST ps;
5066 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5067
5068 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5069
5070 return (ps & t_bit) == 0;
5071 }
5072
5073 /* Write to the PC as from a branch instruction. */
5074
5075 static void
5076 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5077 ULONGEST val)
5078 {
5079 if (!dsc->is_thumb)
5080 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5081 architecture versions < 6. */
5082 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5083 val & ~(ULONGEST) 0x3);
5084 else
5085 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5086 val & ~(ULONGEST) 0x1);
5087 }
5088
5089 /* Write to the PC as from a branch-exchange instruction. */
5090
5091 static void
5092 bx_write_pc (struct regcache *regs, ULONGEST val)
5093 {
5094 ULONGEST ps;
5095 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5096
5097 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5098
5099 if ((val & 1) == 1)
5100 {
5101 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5102 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5103 }
5104 else if ((val & 2) == 0)
5105 {
5106 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5107 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5108 }
5109 else
5110 {
5111 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5112 mode, align dest to 4 bytes). */
5113 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5114 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5115 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5116 }
5117 }
5118
5119 /* Write to the PC as if from a load instruction. */
5120
5121 static void
5122 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5123 ULONGEST val)
5124 {
5125 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5126 bx_write_pc (regs, val);
5127 else
5128 branch_write_pc (regs, dsc, val);
5129 }
5130
5131 /* Write to the PC as if from an ALU instruction. */
5132
5133 static void
5134 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5135 ULONGEST val)
5136 {
5137 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5138 bx_write_pc (regs, val);
5139 else
5140 branch_write_pc (regs, dsc, val);
5141 }
5142
5143 /* Helper for writing to registers for displaced stepping. Writing to the PC
5144 has a varying effects depending on the instruction which does the write:
5145 this is controlled by the WRITE_PC argument. */
5146
5147 void
5148 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5149 int regno, ULONGEST val, enum pc_write_style write_pc)
5150 {
5151 if (regno == ARM_PC_REGNUM)
5152 {
5153 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5154
5155 switch (write_pc)
5156 {
5157 case BRANCH_WRITE_PC:
5158 branch_write_pc (regs, dsc, val);
5159 break;
5160
5161 case BX_WRITE_PC:
5162 bx_write_pc (regs, val);
5163 break;
5164
5165 case LOAD_WRITE_PC:
5166 load_write_pc (regs, dsc, val);
5167 break;
5168
5169 case ALU_WRITE_PC:
5170 alu_write_pc (regs, dsc, val);
5171 break;
5172
5173 case CANNOT_WRITE_PC:
5174 warning (_("Instruction wrote to PC in an unexpected way when "
5175 "single-stepping"));
5176 break;
5177
5178 default:
5179 internal_error (__FILE__, __LINE__,
5180 _("Invalid argument to displaced_write_reg"));
5181 }
5182
5183 dsc->wrote_to_pc = 1;
5184 }
5185 else
5186 {
5187 displaced_debug_printf ("writing r%d value %.8lx",
5188 regno, (unsigned long) val);
5189 regcache_cooked_write_unsigned (regs, regno, val);
5190 }
5191 }
5192
5193 /* This function is used to concisely determine if an instruction INSN
5194 references PC. Register fields of interest in INSN should have the
5195 corresponding fields of BITMASK set to 0b1111. The function
5196 returns return 1 if any of these fields in INSN reference the PC
5197 (also 0b1111, r15), else it returns 0. */
5198
5199 static int
5200 insn_references_pc (uint32_t insn, uint32_t bitmask)
5201 {
5202 uint32_t lowbit = 1;
5203
5204 while (bitmask != 0)
5205 {
5206 uint32_t mask;
5207
5208 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5209 ;
5210
5211 if (!lowbit)
5212 break;
5213
5214 mask = lowbit * 0xf;
5215
5216 if ((insn & mask) == mask)
5217 return 1;
5218
5219 bitmask &= ~mask;
5220 }
5221
5222 return 0;
5223 }
5224
5225 /* The simplest copy function. Many instructions have the same effect no
5226 matter what address they are executed at: in those cases, use this. */
5227
5228 static int
5229 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5230 arm_displaced_step_copy_insn_closure *dsc)
5231 {
5232 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5233 (unsigned long) insn, iname);
5234
5235 dsc->modinsn[0] = insn;
5236
5237 return 0;
5238 }
5239
5240 static int
5241 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5242 uint16_t insn2, const char *iname,
5243 arm_displaced_step_copy_insn_closure *dsc)
5244 {
5245 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5246 "unmodified", insn1, insn2, iname);
5247
5248 dsc->modinsn[0] = insn1;
5249 dsc->modinsn[1] = insn2;
5250 dsc->numinsns = 2;
5251
5252 return 0;
5253 }
5254
5255 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5256 modification. */
5257 static int
5258 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5259 const char *iname,
5260 arm_displaced_step_copy_insn_closure *dsc)
5261 {
5262 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5263 insn, iname);
5264
5265 dsc->modinsn[0] = insn;
5266
5267 return 0;
5268 }
5269
5270 /* Preload instructions with immediate offset. */
5271
5272 static void
5273 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5274 arm_displaced_step_copy_insn_closure *dsc)
5275 {
5276 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5277 if (!dsc->u.preload.immed)
5278 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5279 }
5280
5281 static void
5282 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5283 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5284 {
5285 ULONGEST rn_val;
5286 /* Preload instructions:
5287
5288 {pli/pld} [rn, #+/-imm]
5289 ->
5290 {pli/pld} [r0, #+/-imm]. */
5291
5292 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5293 rn_val = displaced_read_reg (regs, dsc, rn);
5294 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5295 dsc->u.preload.immed = 1;
5296
5297 dsc->cleanup = &cleanup_preload;
5298 }
5299
5300 static int
5301 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5302 arm_displaced_step_copy_insn_closure *dsc)
5303 {
5304 unsigned int rn = bits (insn, 16, 19);
5305
5306 if (!insn_references_pc (insn, 0x000f0000ul))
5307 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5308
5309 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5310
5311 dsc->modinsn[0] = insn & 0xfff0ffff;
5312
5313 install_preload (gdbarch, regs, dsc, rn);
5314
5315 return 0;
5316 }
5317
5318 static int
5319 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5320 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5321 {
5322 unsigned int rn = bits (insn1, 0, 3);
5323 unsigned int u_bit = bit (insn1, 7);
5324 int imm12 = bits (insn2, 0, 11);
5325 ULONGEST pc_val;
5326
5327 if (rn != ARM_PC_REGNUM)
5328 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5329
5330 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5331 PLD (literal) Encoding T1. */
5332 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5333 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5334 imm12);
5335
5336 if (!u_bit)
5337 imm12 = -1 * imm12;
5338
5339 /* Rewrite instruction {pli/pld} PC imm12 into:
5340 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5341
5342 {pli/pld} [r0, r1]
5343
5344 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5345
5346 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5347 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5348
5349 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5350
5351 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5352 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5353 dsc->u.preload.immed = 0;
5354
5355 /* {pli/pld} [r0, r1] */
5356 dsc->modinsn[0] = insn1 & 0xfff0;
5357 dsc->modinsn[1] = 0xf001;
5358 dsc->numinsns = 2;
5359
5360 dsc->cleanup = &cleanup_preload;
5361 return 0;
5362 }
5363
5364 /* Preload instructions with register offset. */
5365
5366 static void
5367 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5368 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5369 unsigned int rm)
5370 {
5371 ULONGEST rn_val, rm_val;
5372
5373 /* Preload register-offset instructions:
5374
5375 {pli/pld} [rn, rm {, shift}]
5376 ->
5377 {pli/pld} [r0, r1 {, shift}]. */
5378
5379 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5380 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5381 rn_val = displaced_read_reg (regs, dsc, rn);
5382 rm_val = displaced_read_reg (regs, dsc, rm);
5383 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5384 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5385 dsc->u.preload.immed = 0;
5386
5387 dsc->cleanup = &cleanup_preload;
5388 }
5389
5390 static int
5391 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5392 struct regcache *regs,
5393 arm_displaced_step_copy_insn_closure *dsc)
5394 {
5395 unsigned int rn = bits (insn, 16, 19);
5396 unsigned int rm = bits (insn, 0, 3);
5397
5398
5399 if (!insn_references_pc (insn, 0x000f000ful))
5400 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5401
5402 displaced_debug_printf ("copying preload insn %.8lx",
5403 (unsigned long) insn);
5404
5405 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5406
5407 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5408 return 0;
5409 }
5410
5411 /* Copy/cleanup coprocessor load and store instructions. */
5412
5413 static void
5414 cleanup_copro_load_store (struct gdbarch *gdbarch,
5415 struct regcache *regs,
5416 arm_displaced_step_copy_insn_closure *dsc)
5417 {
5418 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5419
5420 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5421
5422 if (dsc->u.ldst.writeback)
5423 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5424 }
5425
5426 static void
5427 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5428 arm_displaced_step_copy_insn_closure *dsc,
5429 int writeback, unsigned int rn)
5430 {
5431 ULONGEST rn_val;
5432
5433 /* Coprocessor load/store instructions:
5434
5435 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5436 ->
5437 {stc/stc2} [r0, #+/-imm].
5438
5439 ldc/ldc2 are handled identically. */
5440
5441 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5442 rn_val = displaced_read_reg (regs, dsc, rn);
5443 /* PC should be 4-byte aligned. */
5444 rn_val = rn_val & 0xfffffffc;
5445 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5446
5447 dsc->u.ldst.writeback = writeback;
5448 dsc->u.ldst.rn = rn;
5449
5450 dsc->cleanup = &cleanup_copro_load_store;
5451 }
5452
5453 static int
5454 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5455 struct regcache *regs,
5456 arm_displaced_step_copy_insn_closure *dsc)
5457 {
5458 unsigned int rn = bits (insn, 16, 19);
5459
5460 if (!insn_references_pc (insn, 0x000f0000ul))
5461 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5462
5463 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5464 (unsigned long) insn);
5465
5466 dsc->modinsn[0] = insn & 0xfff0ffff;
5467
5468 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5469
5470 return 0;
5471 }
5472
5473 static int
5474 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5475 uint16_t insn2, struct regcache *regs,
5476 arm_displaced_step_copy_insn_closure *dsc)
5477 {
5478 unsigned int rn = bits (insn1, 0, 3);
5479
5480 if (rn != ARM_PC_REGNUM)
5481 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5482 "copro load/store", dsc);
5483
5484 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5485 insn1, insn2);
5486
5487 dsc->modinsn[0] = insn1 & 0xfff0;
5488 dsc->modinsn[1] = insn2;
5489 dsc->numinsns = 2;
5490
5491 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5492 doesn't support writeback, so pass 0. */
5493 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5494
5495 return 0;
5496 }
5497
5498 /* Clean up branch instructions (actually perform the branch, by setting
5499 PC). */
5500
5501 static void
5502 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5503 arm_displaced_step_copy_insn_closure *dsc)
5504 {
5505 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5506 int branch_taken = condition_true (dsc->u.branch.cond, status);
5507 enum pc_write_style write_pc = dsc->u.branch.exchange
5508 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5509
5510 if (!branch_taken)
5511 return;
5512
5513 if (dsc->u.branch.link)
5514 {
5515 /* The value of LR should be the next insn of current one. In order
5516 not to confuse logic handling later insn `bx lr', if current insn mode
5517 is Thumb, the bit 0 of LR value should be set to 1. */
5518 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5519
5520 if (dsc->is_thumb)
5521 next_insn_addr |= 0x1;
5522
5523 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5524 CANNOT_WRITE_PC);
5525 }
5526
5527 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5528 }
5529
5530 /* Copy B/BL/BLX instructions with immediate destinations. */
5531
5532 static void
5533 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5534 arm_displaced_step_copy_insn_closure *dsc,
5535 unsigned int cond, int exchange, int link, long offset)
5536 {
5537 /* Implement "BL<cond> <label>" as:
5538
5539 Preparation: cond <- instruction condition
5540 Insn: mov r0, r0 (nop)
5541 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5542
5543 B<cond> similar, but don't set r14 in cleanup. */
5544
5545 dsc->u.branch.cond = cond;
5546 dsc->u.branch.link = link;
5547 dsc->u.branch.exchange = exchange;
5548
5549 dsc->u.branch.dest = dsc->insn_addr;
5550 if (link && exchange)
5551 /* For BLX, offset is computed from the Align (PC, 4). */
5552 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5553
5554 if (dsc->is_thumb)
5555 dsc->u.branch.dest += 4 + offset;
5556 else
5557 dsc->u.branch.dest += 8 + offset;
5558
5559 dsc->cleanup = &cleanup_branch;
5560 }
5561 static int
5562 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5563 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5564 {
5565 unsigned int cond = bits (insn, 28, 31);
5566 int exchange = (cond == 0xf);
5567 int link = exchange || bit (insn, 24);
5568 long offset;
5569
5570 displaced_debug_printf ("copying %s immediate insn %.8lx",
5571 (exchange) ? "blx" : (link) ? "bl" : "b",
5572 (unsigned long) insn);
5573 if (exchange)
5574 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5575 then arrange the switch into Thumb mode. */
5576 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5577 else
5578 offset = bits (insn, 0, 23) << 2;
5579
5580 if (bit (offset, 25))
5581 offset = offset | ~0x3ffffff;
5582
5583 dsc->modinsn[0] = ARM_NOP;
5584
5585 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5586 return 0;
5587 }
5588
5589 static int
5590 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5591 uint16_t insn2, struct regcache *regs,
5592 arm_displaced_step_copy_insn_closure *dsc)
5593 {
5594 int link = bit (insn2, 14);
5595 int exchange = link && !bit (insn2, 12);
5596 int cond = INST_AL;
5597 long offset = 0;
5598 int j1 = bit (insn2, 13);
5599 int j2 = bit (insn2, 11);
5600 int s = sbits (insn1, 10, 10);
5601 int i1 = !(j1 ^ bit (insn1, 10));
5602 int i2 = !(j2 ^ bit (insn1, 10));
5603
5604 if (!link && !exchange) /* B */
5605 {
5606 offset = (bits (insn2, 0, 10) << 1);
5607 if (bit (insn2, 12)) /* Encoding T4 */
5608 {
5609 offset |= (bits (insn1, 0, 9) << 12)
5610 | (i2 << 22)
5611 | (i1 << 23)
5612 | (s << 24);
5613 cond = INST_AL;
5614 }
5615 else /* Encoding T3 */
5616 {
5617 offset |= (bits (insn1, 0, 5) << 12)
5618 | (j1 << 18)
5619 | (j2 << 19)
5620 | (s << 20);
5621 cond = bits (insn1, 6, 9);
5622 }
5623 }
5624 else
5625 {
5626 offset = (bits (insn1, 0, 9) << 12);
5627 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5628 offset |= exchange ?
5629 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5630 }
5631
5632 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5633 link ? (exchange) ? "blx" : "bl" : "b",
5634 insn1, insn2, offset);
5635
5636 dsc->modinsn[0] = THUMB_NOP;
5637
5638 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5639 return 0;
5640 }
5641
5642 /* Copy B Thumb instructions. */
5643 static int
5644 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5645 arm_displaced_step_copy_insn_closure *dsc)
5646 {
5647 unsigned int cond = 0;
5648 int offset = 0;
5649 unsigned short bit_12_15 = bits (insn, 12, 15);
5650 CORE_ADDR from = dsc->insn_addr;
5651
5652 if (bit_12_15 == 0xd)
5653 {
5654 /* offset = SignExtend (imm8:0, 32) */
5655 offset = sbits ((insn << 1), 0, 8);
5656 cond = bits (insn, 8, 11);
5657 }
5658 else if (bit_12_15 == 0xe) /* Encoding T2 */
5659 {
5660 offset = sbits ((insn << 1), 0, 11);
5661 cond = INST_AL;
5662 }
5663
5664 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5665 insn, offset);
5666
5667 dsc->u.branch.cond = cond;
5668 dsc->u.branch.link = 0;
5669 dsc->u.branch.exchange = 0;
5670 dsc->u.branch.dest = from + 4 + offset;
5671
5672 dsc->modinsn[0] = THUMB_NOP;
5673
5674 dsc->cleanup = &cleanup_branch;
5675
5676 return 0;
5677 }
5678
5679 /* Copy BX/BLX with register-specified destinations. */
5680
5681 static void
5682 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5683 arm_displaced_step_copy_insn_closure *dsc, int link,
5684 unsigned int cond, unsigned int rm)
5685 {
5686 /* Implement {BX,BLX}<cond> <reg>" as:
5687
5688 Preparation: cond <- instruction condition
5689 Insn: mov r0, r0 (nop)
5690 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5691
5692 Don't set r14 in cleanup for BX. */
5693
5694 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5695
5696 dsc->u.branch.cond = cond;
5697 dsc->u.branch.link = link;
5698
5699 dsc->u.branch.exchange = 1;
5700
5701 dsc->cleanup = &cleanup_branch;
5702 }
5703
5704 static int
5705 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5706 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5707 {
5708 unsigned int cond = bits (insn, 28, 31);
5709 /* BX: x12xxx1x
5710 BLX: x12xxx3x. */
5711 int link = bit (insn, 5);
5712 unsigned int rm = bits (insn, 0, 3);
5713
5714 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5715
5716 dsc->modinsn[0] = ARM_NOP;
5717
5718 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5719 return 0;
5720 }
5721
5722 static int
5723 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5724 struct regcache *regs,
5725 arm_displaced_step_copy_insn_closure *dsc)
5726 {
5727 int link = bit (insn, 7);
5728 unsigned int rm = bits (insn, 3, 6);
5729
5730 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5731
5732 dsc->modinsn[0] = THUMB_NOP;
5733
5734 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5735
5736 return 0;
5737 }
5738
5739
5740 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5741
5742 static void
5743 cleanup_alu_imm (struct gdbarch *gdbarch,
5744 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5745 {
5746 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5747 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5748 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5749 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5750 }
5751
5752 static int
5753 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5754 arm_displaced_step_copy_insn_closure *dsc)
5755 {
5756 unsigned int rn = bits (insn, 16, 19);
5757 unsigned int rd = bits (insn, 12, 15);
5758 unsigned int op = bits (insn, 21, 24);
5759 int is_mov = (op == 0xd);
5760 ULONGEST rd_val, rn_val;
5761
5762 if (!insn_references_pc (insn, 0x000ff000ul))
5763 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5764
5765 displaced_debug_printf ("copying immediate %s insn %.8lx",
5766 is_mov ? "move" : "ALU",
5767 (unsigned long) insn);
5768
5769 /* Instruction is of form:
5770
5771 <op><cond> rd, [rn,] #imm
5772
5773 Rewrite as:
5774
5775 Preparation: tmp1, tmp2 <- r0, r1;
5776 r0, r1 <- rd, rn
5777 Insn: <op><cond> r0, r1, #imm
5778 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5779 */
5780
5781 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5782 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5783 rn_val = displaced_read_reg (regs, dsc, rn);
5784 rd_val = displaced_read_reg (regs, dsc, rd);
5785 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5786 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5787 dsc->rd = rd;
5788
5789 if (is_mov)
5790 dsc->modinsn[0] = insn & 0xfff00fff;
5791 else
5792 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5793
5794 dsc->cleanup = &cleanup_alu_imm;
5795
5796 return 0;
5797 }
5798
5799 static int
5800 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5801 uint16_t insn2, struct regcache *regs,
5802 arm_displaced_step_copy_insn_closure *dsc)
5803 {
5804 unsigned int op = bits (insn1, 5, 8);
5805 unsigned int rn, rm, rd;
5806 ULONGEST rd_val, rn_val;
5807
5808 rn = bits (insn1, 0, 3); /* Rn */
5809 rm = bits (insn2, 0, 3); /* Rm */
5810 rd = bits (insn2, 8, 11); /* Rd */
5811
5812 /* This routine is only called for instruction MOV. */
5813 gdb_assert (op == 0x2 && rn == 0xf);
5814
5815 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5816 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5817
5818 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5819
5820 /* Instruction is of form:
5821
5822 <op><cond> rd, [rn,] #imm
5823
5824 Rewrite as:
5825
5826 Preparation: tmp1, tmp2 <- r0, r1;
5827 r0, r1 <- rd, rn
5828 Insn: <op><cond> r0, r1, #imm
5829 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5830 */
5831
5832 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5833 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5834 rn_val = displaced_read_reg (regs, dsc, rn);
5835 rd_val = displaced_read_reg (regs, dsc, rd);
5836 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5837 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5838 dsc->rd = rd;
5839
5840 dsc->modinsn[0] = insn1;
5841 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5842 dsc->numinsns = 2;
5843
5844 dsc->cleanup = &cleanup_alu_imm;
5845
5846 return 0;
5847 }
5848
5849 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5850
5851 static void
5852 cleanup_alu_reg (struct gdbarch *gdbarch,
5853 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5854 {
5855 ULONGEST rd_val;
5856 int i;
5857
5858 rd_val = displaced_read_reg (regs, dsc, 0);
5859
5860 for (i = 0; i < 3; i++)
5861 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5862
5863 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5864 }
5865
5866 static void
5867 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5868 arm_displaced_step_copy_insn_closure *dsc,
5869 unsigned int rd, unsigned int rn, unsigned int rm)
5870 {
5871 ULONGEST rd_val, rn_val, rm_val;
5872
5873 /* Instruction is of form:
5874
5875 <op><cond> rd, [rn,] rm [, <shift>]
5876
5877 Rewrite as:
5878
5879 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5880 r0, r1, r2 <- rd, rn, rm
5881 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5882 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5883 */
5884
5885 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5886 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5887 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5888 rd_val = displaced_read_reg (regs, dsc, rd);
5889 rn_val = displaced_read_reg (regs, dsc, rn);
5890 rm_val = displaced_read_reg (regs, dsc, rm);
5891 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5892 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5893 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5894 dsc->rd = rd;
5895
5896 dsc->cleanup = &cleanup_alu_reg;
5897 }
5898
5899 static int
5900 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5901 arm_displaced_step_copy_insn_closure *dsc)
5902 {
5903 unsigned int op = bits (insn, 21, 24);
5904 int is_mov = (op == 0xd);
5905
5906 if (!insn_references_pc (insn, 0x000ff00ful))
5907 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5908
5909 displaced_debug_printf ("copying reg %s insn %.8lx",
5910 is_mov ? "move" : "ALU", (unsigned long) insn);
5911
5912 if (is_mov)
5913 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5914 else
5915 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5916
5917 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5918 bits (insn, 0, 3));
5919 return 0;
5920 }
5921
5922 static int
5923 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5924 struct regcache *regs,
5925 arm_displaced_step_copy_insn_closure *dsc)
5926 {
5927 unsigned rm, rd;
5928
5929 rm = bits (insn, 3, 6);
5930 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5931
5932 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5933 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5934
5935 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5936
5937 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5938
5939 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5940
5941 return 0;
5942 }
5943
5944 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5945
5946 static void
5947 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5948 struct regcache *regs,
5949 arm_displaced_step_copy_insn_closure *dsc)
5950 {
5951 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5952 int i;
5953
5954 for (i = 0; i < 4; i++)
5955 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5956
5957 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5958 }
5959
5960 static void
5961 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5962 arm_displaced_step_copy_insn_closure *dsc,
5963 unsigned int rd, unsigned int rn, unsigned int rm,
5964 unsigned rs)
5965 {
5966 int i;
5967 ULONGEST rd_val, rn_val, rm_val, rs_val;
5968
5969 /* Instruction is of form:
5970
5971 <op><cond> rd, [rn,] rm, <shift> rs
5972
5973 Rewrite as:
5974
5975 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5976 r0, r1, r2, r3 <- rd, rn, rm, rs
5977 Insn: <op><cond> r0, r1, r2, <shift> r3
5978 Cleanup: tmp5 <- r0
5979 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5980 rd <- tmp5
5981 */
5982
5983 for (i = 0; i < 4; i++)
5984 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5985
5986 rd_val = displaced_read_reg (regs, dsc, rd);
5987 rn_val = displaced_read_reg (regs, dsc, rn);
5988 rm_val = displaced_read_reg (regs, dsc, rm);
5989 rs_val = displaced_read_reg (regs, dsc, rs);
5990 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5991 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5992 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5993 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5994 dsc->rd = rd;
5995 dsc->cleanup = &cleanup_alu_shifted_reg;
5996 }
5997
5998 static int
5999 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6000 struct regcache *regs,
6001 arm_displaced_step_copy_insn_closure *dsc)
6002 {
6003 unsigned int op = bits (insn, 21, 24);
6004 int is_mov = (op == 0xd);
6005 unsigned int rd, rn, rm, rs;
6006
6007 if (!insn_references_pc (insn, 0x000fff0ful))
6008 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6009
6010 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6011 is_mov ? "move" : "ALU",
6012 (unsigned long) insn);
6013
6014 rn = bits (insn, 16, 19);
6015 rm = bits (insn, 0, 3);
6016 rs = bits (insn, 8, 11);
6017 rd = bits (insn, 12, 15);
6018
6019 if (is_mov)
6020 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6021 else
6022 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6023
6024 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6025
6026 return 0;
6027 }
6028
6029 /* Clean up load instructions. */
6030
6031 static void
6032 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6033 arm_displaced_step_copy_insn_closure *dsc)
6034 {
6035 ULONGEST rt_val, rt_val2 = 0, rn_val;
6036
6037 rt_val = displaced_read_reg (regs, dsc, 0);
6038 if (dsc->u.ldst.xfersize == 8)
6039 rt_val2 = displaced_read_reg (regs, dsc, 1);
6040 rn_val = displaced_read_reg (regs, dsc, 2);
6041
6042 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6043 if (dsc->u.ldst.xfersize > 4)
6044 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6045 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6046 if (!dsc->u.ldst.immed)
6047 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6048
6049 /* Handle register writeback. */
6050 if (dsc->u.ldst.writeback)
6051 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6052 /* Put result in right place. */
6053 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6054 if (dsc->u.ldst.xfersize == 8)
6055 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6056 }
6057
6058 /* Clean up store instructions. */
6059
6060 static void
6061 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6062 arm_displaced_step_copy_insn_closure *dsc)
6063 {
6064 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6065
6066 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6067 if (dsc->u.ldst.xfersize > 4)
6068 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6069 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6070 if (!dsc->u.ldst.immed)
6071 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6072 if (!dsc->u.ldst.restore_r4)
6073 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6074
6075 /* Writeback. */
6076 if (dsc->u.ldst.writeback)
6077 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6078 }
6079
6080 /* Copy "extra" load/store instructions. These are halfword/doubleword
6081 transfers, which have a different encoding to byte/word transfers. */
6082
6083 static int
6084 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6085 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6086 {
6087 unsigned int op1 = bits (insn, 20, 24);
6088 unsigned int op2 = bits (insn, 5, 6);
6089 unsigned int rt = bits (insn, 12, 15);
6090 unsigned int rn = bits (insn, 16, 19);
6091 unsigned int rm = bits (insn, 0, 3);
6092 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6093 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6094 int immed = (op1 & 0x4) != 0;
6095 int opcode;
6096 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6097
6098 if (!insn_references_pc (insn, 0x000ff00ful))
6099 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6100
6101 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6102 unprivileged ? "unprivileged " : "",
6103 (unsigned long) insn);
6104
6105 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6106
6107 if (opcode < 0)
6108 internal_error (__FILE__, __LINE__,
6109 _("copy_extra_ld_st: instruction decode error"));
6110
6111 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6112 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6113 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6114 if (!immed)
6115 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6116
6117 rt_val = displaced_read_reg (regs, dsc, rt);
6118 if (bytesize[opcode] == 8)
6119 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6120 rn_val = displaced_read_reg (regs, dsc, rn);
6121 if (!immed)
6122 rm_val = displaced_read_reg (regs, dsc, rm);
6123
6124 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6125 if (bytesize[opcode] == 8)
6126 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6127 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6128 if (!immed)
6129 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6130
6131 dsc->rd = rt;
6132 dsc->u.ldst.xfersize = bytesize[opcode];
6133 dsc->u.ldst.rn = rn;
6134 dsc->u.ldst.immed = immed;
6135 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6136 dsc->u.ldst.restore_r4 = 0;
6137
6138 if (immed)
6139 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6140 ->
6141 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6142 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6143 else
6144 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6145 ->
6146 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6147 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6148
6149 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6150
6151 return 0;
6152 }
6153
6154 /* Copy byte/half word/word loads and stores. */
6155
6156 static void
6157 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6158 arm_displaced_step_copy_insn_closure *dsc, int load,
6159 int immed, int writeback, int size, int usermode,
6160 int rt, int rm, int rn)
6161 {
6162 ULONGEST rt_val, rn_val, rm_val = 0;
6163
6164 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6165 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6166 if (!immed)
6167 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6168 if (!load)
6169 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6170
6171 rt_val = displaced_read_reg (regs, dsc, rt);
6172 rn_val = displaced_read_reg (regs, dsc, rn);
6173 if (!immed)
6174 rm_val = displaced_read_reg (regs, dsc, rm);
6175
6176 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6177 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6178 if (!immed)
6179 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6180 dsc->rd = rt;
6181 dsc->u.ldst.xfersize = size;
6182 dsc->u.ldst.rn = rn;
6183 dsc->u.ldst.immed = immed;
6184 dsc->u.ldst.writeback = writeback;
6185
6186 /* To write PC we can do:
6187
6188 Before this sequence of instructions:
6189 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6190 r2 is the Rn value got from displaced_read_reg.
6191
6192 Insn1: push {pc} Write address of STR instruction + offset on stack
6193 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6194 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6195 = addr(Insn1) + offset - addr(Insn3) - 8
6196 = offset - 16
6197 Insn4: add r4, r4, #8 r4 = offset - 8
6198 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6199 = from + offset
6200 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6201
6202 Otherwise we don't know what value to write for PC, since the offset is
6203 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6204 of this can be found in Section "Saving from r15" in
6205 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6206
6207 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6208 }
6209
6210
6211 static int
6212 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6213 uint16_t insn2, struct regcache *regs,
6214 arm_displaced_step_copy_insn_closure *dsc, int size)
6215 {
6216 unsigned int u_bit = bit (insn1, 7);
6217 unsigned int rt = bits (insn2, 12, 15);
6218 int imm12 = bits (insn2, 0, 11);
6219 ULONGEST pc_val;
6220
6221 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6222 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6223 imm12);
6224
6225 if (!u_bit)
6226 imm12 = -1 * imm12;
6227
6228 /* Rewrite instruction LDR Rt imm12 into:
6229
6230 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6231
6232 LDR R0, R2, R3,
6233
6234 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6235
6236
6237 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6238 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6239 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6240
6241 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6242
6243 pc_val = pc_val & 0xfffffffc;
6244
6245 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6246 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6247
6248 dsc->rd = rt;
6249
6250 dsc->u.ldst.xfersize = size;
6251 dsc->u.ldst.immed = 0;
6252 dsc->u.ldst.writeback = 0;
6253 dsc->u.ldst.restore_r4 = 0;
6254
6255 /* LDR R0, R2, R3 */
6256 dsc->modinsn[0] = 0xf852;
6257 dsc->modinsn[1] = 0x3;
6258 dsc->numinsns = 2;
6259
6260 dsc->cleanup = &cleanup_load;
6261
6262 return 0;
6263 }
6264
6265 static int
6266 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6267 uint16_t insn2, struct regcache *regs,
6268 arm_displaced_step_copy_insn_closure *dsc,
6269 int writeback, int immed)
6270 {
6271 unsigned int rt = bits (insn2, 12, 15);
6272 unsigned int rn = bits (insn1, 0, 3);
6273 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6274 /* In LDR (register), there is also a register Rm, which is not allowed to
6275 be PC, so we don't have to check it. */
6276
6277 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6278 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6279 dsc);
6280
6281 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6282 rt, rn, insn1, insn2);
6283
6284 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6285 0, rt, rm, rn);
6286
6287 dsc->u.ldst.restore_r4 = 0;
6288
6289 if (immed)
6290 /* ldr[b]<cond> rt, [rn, #imm], etc.
6291 ->
6292 ldr[b]<cond> r0, [r2, #imm]. */
6293 {
6294 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6295 dsc->modinsn[1] = insn2 & 0x0fff;
6296 }
6297 else
6298 /* ldr[b]<cond> rt, [rn, rm], etc.
6299 ->
6300 ldr[b]<cond> r0, [r2, r3]. */
6301 {
6302 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6303 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6304 }
6305
6306 dsc->numinsns = 2;
6307
6308 return 0;
6309 }
6310
6311
6312 static int
6313 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6314 struct regcache *regs,
6315 arm_displaced_step_copy_insn_closure *dsc,
6316 int load, int size, int usermode)
6317 {
6318 int immed = !bit (insn, 25);
6319 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6320 unsigned int rt = bits (insn, 12, 15);
6321 unsigned int rn = bits (insn, 16, 19);
6322 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6323
6324 if (!insn_references_pc (insn, 0x000ff00ful))
6325 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6326
6327 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6328 load ? (size == 1 ? "ldrb" : "ldr")
6329 : (size == 1 ? "strb" : "str"),
6330 usermode ? "t" : "",
6331 rt, rn,
6332 (unsigned long) insn);
6333
6334 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6335 usermode, rt, rm, rn);
6336
6337 if (load || rt != ARM_PC_REGNUM)
6338 {
6339 dsc->u.ldst.restore_r4 = 0;
6340
6341 if (immed)
6342 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6343 ->
6344 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6345 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6346 else
6347 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6348 ->
6349 {ldr,str}[b]<cond> r0, [r2, r3]. */
6350 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6351 }
6352 else
6353 {
6354 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6355 dsc->u.ldst.restore_r4 = 1;
6356 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6357 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6358 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6359 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6360 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6361
6362 /* As above. */
6363 if (immed)
6364 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6365 else
6366 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6367
6368 dsc->numinsns = 6;
6369 }
6370
6371 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6372
6373 return 0;
6374 }
6375
6376 /* Cleanup LDM instructions with fully-populated register list. This is an
6377 unfortunate corner case: it's impossible to implement correctly by modifying
6378 the instruction. The issue is as follows: we have an instruction,
6379
6380 ldm rN, {r0-r15}
6381
6382 which we must rewrite to avoid loading PC. A possible solution would be to
6383 do the load in two halves, something like (with suitable cleanup
6384 afterwards):
6385
6386 mov r8, rN
6387 ldm[id][ab] r8!, {r0-r7}
6388 str r7, <temp>
6389 ldm[id][ab] r8, {r7-r14}
6390 <bkpt>
6391
6392 but at present there's no suitable place for <temp>, since the scratch space
6393 is overwritten before the cleanup routine is called. For now, we simply
6394 emulate the instruction. */
6395
6396 static void
6397 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6398 arm_displaced_step_copy_insn_closure *dsc)
6399 {
6400 int inc = dsc->u.block.increment;
6401 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6402 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6403 uint32_t regmask = dsc->u.block.regmask;
6404 int regno = inc ? 0 : 15;
6405 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6406 int exception_return = dsc->u.block.load && dsc->u.block.user
6407 && (regmask & 0x8000) != 0;
6408 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6409 int do_transfer = condition_true (dsc->u.block.cond, status);
6410 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6411
6412 if (!do_transfer)
6413 return;
6414
6415 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6416 sensible we can do here. Complain loudly. */
6417 if (exception_return)
6418 error (_("Cannot single-step exception return"));
6419
6420 /* We don't handle any stores here for now. */
6421 gdb_assert (dsc->u.block.load != 0);
6422
6423 displaced_debug_printf ("emulating block transfer: %s %s %s",
6424 dsc->u.block.load ? "ldm" : "stm",
6425 dsc->u.block.increment ? "inc" : "dec",
6426 dsc->u.block.before ? "before" : "after");
6427
6428 while (regmask)
6429 {
6430 uint32_t memword;
6431
6432 if (inc)
6433 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6434 regno++;
6435 else
6436 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6437 regno--;
6438
6439 xfer_addr += bump_before;
6440
6441 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6442 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6443
6444 xfer_addr += bump_after;
6445
6446 regmask &= ~(1 << regno);
6447 }
6448
6449 if (dsc->u.block.writeback)
6450 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6451 CANNOT_WRITE_PC);
6452 }
6453
6454 /* Clean up an STM which included the PC in the register list. */
6455
6456 static void
6457 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6458 arm_displaced_step_copy_insn_closure *dsc)
6459 {
6460 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6461 int store_executed = condition_true (dsc->u.block.cond, status);
6462 CORE_ADDR pc_stored_at, transferred_regs
6463 = count_one_bits (dsc->u.block.regmask);
6464 CORE_ADDR stm_insn_addr;
6465 uint32_t pc_val;
6466 long offset;
6467 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6468
6469 /* If condition code fails, there's nothing else to do. */
6470 if (!store_executed)
6471 return;
6472
6473 if (dsc->u.block.increment)
6474 {
6475 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6476
6477 if (dsc->u.block.before)
6478 pc_stored_at += 4;
6479 }
6480 else
6481 {
6482 pc_stored_at = dsc->u.block.xfer_addr;
6483
6484 if (dsc->u.block.before)
6485 pc_stored_at -= 4;
6486 }
6487
6488 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6489 stm_insn_addr = dsc->scratch_base;
6490 offset = pc_val - stm_insn_addr;
6491
6492 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6493 offset);
6494
6495 /* Rewrite the stored PC to the proper value for the non-displaced original
6496 instruction. */
6497 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6498 dsc->insn_addr + offset);
6499 }
6500
6501 /* Clean up an LDM which includes the PC in the register list. We clumped all
6502 the registers in the transferred list into a contiguous range r0...rX (to
6503 avoid loading PC directly and losing control of the debugged program), so we
6504 must undo that here. */
6505
6506 static void
6507 cleanup_block_load_pc (struct gdbarch *gdbarch,
6508 struct regcache *regs,
6509 arm_displaced_step_copy_insn_closure *dsc)
6510 {
6511 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6512 int load_executed = condition_true (dsc->u.block.cond, status);
6513 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6514 unsigned int regs_loaded = count_one_bits (mask);
6515 unsigned int num_to_shuffle = regs_loaded, clobbered;
6516
6517 /* The method employed here will fail if the register list is fully populated
6518 (we need to avoid loading PC directly). */
6519 gdb_assert (num_to_shuffle < 16);
6520
6521 if (!load_executed)
6522 return;
6523
6524 clobbered = (1 << num_to_shuffle) - 1;
6525
6526 while (num_to_shuffle > 0)
6527 {
6528 if ((mask & (1 << write_reg)) != 0)
6529 {
6530 unsigned int read_reg = num_to_shuffle - 1;
6531
6532 if (read_reg != write_reg)
6533 {
6534 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6535 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6536 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6537 read_reg, write_reg);
6538 }
6539 else
6540 displaced_debug_printf ("LDM: register r%d already in the right "
6541 "place", write_reg);
6542
6543 clobbered &= ~(1 << write_reg);
6544
6545 num_to_shuffle--;
6546 }
6547
6548 write_reg--;
6549 }
6550
6551 /* Restore any registers we scribbled over. */
6552 for (write_reg = 0; clobbered != 0; write_reg++)
6553 {
6554 if ((clobbered & (1 << write_reg)) != 0)
6555 {
6556 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6557 CANNOT_WRITE_PC);
6558 displaced_debug_printf ("LDM: restored clobbered register r%d",
6559 write_reg);
6560 clobbered &= ~(1 << write_reg);
6561 }
6562 }
6563
6564 /* Perform register writeback manually. */
6565 if (dsc->u.block.writeback)
6566 {
6567 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6568
6569 if (dsc->u.block.increment)
6570 new_rn_val += regs_loaded * 4;
6571 else
6572 new_rn_val -= regs_loaded * 4;
6573
6574 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6575 CANNOT_WRITE_PC);
6576 }
6577 }
6578
6579 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6580 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6581
6582 static int
6583 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6584 struct regcache *regs,
6585 arm_displaced_step_copy_insn_closure *dsc)
6586 {
6587 int load = bit (insn, 20);
6588 int user = bit (insn, 22);
6589 int increment = bit (insn, 23);
6590 int before = bit (insn, 24);
6591 int writeback = bit (insn, 21);
6592 int rn = bits (insn, 16, 19);
6593
6594 /* Block transfers which don't mention PC can be run directly
6595 out-of-line. */
6596 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6597 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6598
6599 if (rn == ARM_PC_REGNUM)
6600 {
6601 warning (_("displaced: Unpredictable LDM or STM with "
6602 "base register r15"));
6603 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6604 }
6605
6606 displaced_debug_printf ("copying block transfer insn %.8lx",
6607 (unsigned long) insn);
6608
6609 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6610 dsc->u.block.rn = rn;
6611
6612 dsc->u.block.load = load;
6613 dsc->u.block.user = user;
6614 dsc->u.block.increment = increment;
6615 dsc->u.block.before = before;
6616 dsc->u.block.writeback = writeback;
6617 dsc->u.block.cond = bits (insn, 28, 31);
6618
6619 dsc->u.block.regmask = insn & 0xffff;
6620
6621 if (load)
6622 {
6623 if ((insn & 0xffff) == 0xffff)
6624 {
6625 /* LDM with a fully-populated register list. This case is
6626 particularly tricky. Implement for now by fully emulating the
6627 instruction (which might not behave perfectly in all cases, but
6628 these instructions should be rare enough for that not to matter
6629 too much). */
6630 dsc->modinsn[0] = ARM_NOP;
6631
6632 dsc->cleanup = &cleanup_block_load_all;
6633 }
6634 else
6635 {
6636 /* LDM of a list of registers which includes PC. Implement by
6637 rewriting the list of registers to be transferred into a
6638 contiguous chunk r0...rX before doing the transfer, then shuffling
6639 registers into the correct places in the cleanup routine. */
6640 unsigned int regmask = insn & 0xffff;
6641 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6642 unsigned int i;
6643
6644 for (i = 0; i < num_in_list; i++)
6645 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6646
6647 /* Writeback makes things complicated. We need to avoid clobbering
6648 the base register with one of the registers in our modified
6649 register list, but just using a different register can't work in
6650 all cases, e.g.:
6651
6652 ldm r14!, {r0-r13,pc}
6653
6654 which would need to be rewritten as:
6655
6656 ldm rN!, {r0-r14}
6657
6658 but that can't work, because there's no free register for N.
6659
6660 Solve this by turning off the writeback bit, and emulating
6661 writeback manually in the cleanup routine. */
6662
6663 if (writeback)
6664 insn &= ~(1 << 21);
6665
6666 new_regmask = (1 << num_in_list) - 1;
6667
6668 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6669 "%.4x, modified list %.4x",
6670 rn, writeback ? "!" : "",
6671 (int) insn & 0xffff, new_regmask);
6672
6673 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6674
6675 dsc->cleanup = &cleanup_block_load_pc;
6676 }
6677 }
6678 else
6679 {
6680 /* STM of a list of registers which includes PC. Run the instruction
6681 as-is, but out of line: this will store the wrong value for the PC,
6682 so we must manually fix up the memory in the cleanup routine.
6683 Doing things this way has the advantage that we can auto-detect
6684 the offset of the PC write (which is architecture-dependent) in
6685 the cleanup routine. */
6686 dsc->modinsn[0] = insn;
6687
6688 dsc->cleanup = &cleanup_block_store_pc;
6689 }
6690
6691 return 0;
6692 }
6693
6694 static int
6695 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6696 struct regcache *regs,
6697 arm_displaced_step_copy_insn_closure *dsc)
6698 {
6699 int rn = bits (insn1, 0, 3);
6700 int load = bit (insn1, 4);
6701 int writeback = bit (insn1, 5);
6702
6703 /* Block transfers which don't mention PC can be run directly
6704 out-of-line. */
6705 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6706 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6707
6708 if (rn == ARM_PC_REGNUM)
6709 {
6710 warning (_("displaced: Unpredictable LDM or STM with "
6711 "base register r15"));
6712 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6713 "unpredictable ldm/stm", dsc);
6714 }
6715
6716 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6717 insn1, insn2);
6718
6719 /* Clear bit 13, since it should be always zero. */
6720 dsc->u.block.regmask = (insn2 & 0xdfff);
6721 dsc->u.block.rn = rn;
6722
6723 dsc->u.block.load = load;
6724 dsc->u.block.user = 0;
6725 dsc->u.block.increment = bit (insn1, 7);
6726 dsc->u.block.before = bit (insn1, 8);
6727 dsc->u.block.writeback = writeback;
6728 dsc->u.block.cond = INST_AL;
6729 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6730
6731 if (load)
6732 {
6733 if (dsc->u.block.regmask == 0xffff)
6734 {
6735 /* This branch is impossible to happen. */
6736 gdb_assert (0);
6737 }
6738 else
6739 {
6740 unsigned int regmask = dsc->u.block.regmask;
6741 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6742 unsigned int i;
6743
6744 for (i = 0; i < num_in_list; i++)
6745 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6746
6747 if (writeback)
6748 insn1 &= ~(1 << 5);
6749
6750 new_regmask = (1 << num_in_list) - 1;
6751
6752 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6753 "%.4x, modified list %.4x",
6754 rn, writeback ? "!" : "",
6755 (int) dsc->u.block.regmask, new_regmask);
6756
6757 dsc->modinsn[0] = insn1;
6758 dsc->modinsn[1] = (new_regmask & 0xffff);
6759 dsc->numinsns = 2;
6760
6761 dsc->cleanup = &cleanup_block_load_pc;
6762 }
6763 }
6764 else
6765 {
6766 dsc->modinsn[0] = insn1;
6767 dsc->modinsn[1] = insn2;
6768 dsc->numinsns = 2;
6769 dsc->cleanup = &cleanup_block_store_pc;
6770 }
6771 return 0;
6772 }
6773
6774 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6775 This is used to avoid a dependency on BFD's bfd_endian enum. */
6776
6777 ULONGEST
6778 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6779 int byte_order)
6780 {
6781 return read_memory_unsigned_integer (memaddr, len,
6782 (enum bfd_endian) byte_order);
6783 }
6784
6785 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6786
6787 CORE_ADDR
6788 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6789 CORE_ADDR val)
6790 {
6791 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6792 }
6793
6794 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6795
6796 static CORE_ADDR
6797 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6798 {
6799 return 0;
6800 }
6801
6802 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6803
6804 int
6805 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6806 {
6807 return arm_is_thumb (self->regcache);
6808 }
6809
6810 /* single_step() is called just before we want to resume the inferior,
6811 if we want to single-step it but there is no hardware or kernel
6812 single-step support. We find the target of the coming instructions
6813 and breakpoint them. */
6814
6815 std::vector<CORE_ADDR>
6816 arm_software_single_step (struct regcache *regcache)
6817 {
6818 struct gdbarch *gdbarch = regcache->arch ();
6819 struct arm_get_next_pcs next_pcs_ctx;
6820
6821 arm_get_next_pcs_ctor (&next_pcs_ctx,
6822 &arm_get_next_pcs_ops,
6823 gdbarch_byte_order (gdbarch),
6824 gdbarch_byte_order_for_code (gdbarch),
6825 0,
6826 regcache);
6827
6828 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6829
6830 for (CORE_ADDR &pc_ref : next_pcs)
6831 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6832
6833 return next_pcs;
6834 }
6835
6836 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6837 for Linux, where some SVC instructions must be treated specially. */
6838
6839 static void
6840 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6841 arm_displaced_step_copy_insn_closure *dsc)
6842 {
6843 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6844
6845 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6846 (unsigned long) resume_addr);
6847
6848 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6849 }
6850
6851
6852 /* Common copy routine for svc instruction. */
6853
6854 static int
6855 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6856 arm_displaced_step_copy_insn_closure *dsc)
6857 {
6858 /* Preparation: none.
6859 Insn: unmodified svc.
6860 Cleanup: pc <- insn_addr + insn_size. */
6861
6862 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6863 instruction. */
6864 dsc->wrote_to_pc = 1;
6865
6866 /* Allow OS-specific code to override SVC handling. */
6867 if (dsc->u.svc.copy_svc_os)
6868 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6869 else
6870 {
6871 dsc->cleanup = &cleanup_svc;
6872 return 0;
6873 }
6874 }
6875
6876 static int
6877 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6878 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6879 {
6880
6881 displaced_debug_printf ("copying svc insn %.8lx",
6882 (unsigned long) insn);
6883
6884 dsc->modinsn[0] = insn;
6885
6886 return install_svc (gdbarch, regs, dsc);
6887 }
6888
6889 static int
6890 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6891 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6892 {
6893
6894 displaced_debug_printf ("copying svc insn %.4x", insn);
6895
6896 dsc->modinsn[0] = insn;
6897
6898 return install_svc (gdbarch, regs, dsc);
6899 }
6900
6901 /* Copy undefined instructions. */
6902
6903 static int
6904 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6905 arm_displaced_step_copy_insn_closure *dsc)
6906 {
6907 displaced_debug_printf ("copying undefined insn %.8lx",
6908 (unsigned long) insn);
6909
6910 dsc->modinsn[0] = insn;
6911
6912 return 0;
6913 }
6914
6915 static int
6916 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6917 arm_displaced_step_copy_insn_closure *dsc)
6918 {
6919
6920 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6921 (unsigned short) insn1, (unsigned short) insn2);
6922
6923 dsc->modinsn[0] = insn1;
6924 dsc->modinsn[1] = insn2;
6925 dsc->numinsns = 2;
6926
6927 return 0;
6928 }
6929
6930 /* Copy unpredictable instructions. */
6931
6932 static int
6933 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6934 arm_displaced_step_copy_insn_closure *dsc)
6935 {
6936 displaced_debug_printf ("copying unpredictable insn %.8lx",
6937 (unsigned long) insn);
6938
6939 dsc->modinsn[0] = insn;
6940
6941 return 0;
6942 }
6943
6944 /* The decode_* functions are instruction decoding helpers. They mostly follow
6945 the presentation in the ARM ARM. */
6946
6947 static int
6948 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6949 struct regcache *regs,
6950 arm_displaced_step_copy_insn_closure *dsc)
6951 {
6952 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6953 unsigned int rn = bits (insn, 16, 19);
6954
6955 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6956 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6957 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6958 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6959 else if ((op1 & 0x60) == 0x20)
6960 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6961 else if ((op1 & 0x71) == 0x40)
6962 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6963 dsc);
6964 else if ((op1 & 0x77) == 0x41)
6965 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6966 else if ((op1 & 0x77) == 0x45)
6967 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6968 else if ((op1 & 0x77) == 0x51)
6969 {
6970 if (rn != 0xf)
6971 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6972 else
6973 return arm_copy_unpred (gdbarch, insn, dsc);
6974 }
6975 else if ((op1 & 0x77) == 0x55)
6976 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6977 else if (op1 == 0x57)
6978 switch (op2)
6979 {
6980 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6981 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6982 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6983 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6984 default: return arm_copy_unpred (gdbarch, insn, dsc);
6985 }
6986 else if ((op1 & 0x63) == 0x43)
6987 return arm_copy_unpred (gdbarch, insn, dsc);
6988 else if ((op2 & 0x1) == 0x0)
6989 switch (op1 & ~0x80)
6990 {
6991 case 0x61:
6992 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6993 case 0x65:
6994 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6995 case 0x71: case 0x75:
6996 /* pld/pldw reg. */
6997 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6998 case 0x63: case 0x67: case 0x73: case 0x77:
6999 return arm_copy_unpred (gdbarch, insn, dsc);
7000 default:
7001 return arm_copy_undef (gdbarch, insn, dsc);
7002 }
7003 else
7004 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7005 }
7006
7007 static int
7008 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7009 struct regcache *regs,
7010 arm_displaced_step_copy_insn_closure *dsc)
7011 {
7012 if (bit (insn, 27) == 0)
7013 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7014 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7015 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7016 {
7017 case 0x0: case 0x2:
7018 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7019
7020 case 0x1: case 0x3:
7021 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7022
7023 case 0x4: case 0x5: case 0x6: case 0x7:
7024 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7025
7026 case 0x8:
7027 switch ((insn & 0xe00000) >> 21)
7028 {
7029 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7030 /* stc/stc2. */
7031 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7032
7033 case 0x2:
7034 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7035
7036 default:
7037 return arm_copy_undef (gdbarch, insn, dsc);
7038 }
7039
7040 case 0x9:
7041 {
7042 int rn_f = (bits (insn, 16, 19) == 0xf);
7043 switch ((insn & 0xe00000) >> 21)
7044 {
7045 case 0x1: case 0x3:
7046 /* ldc/ldc2 imm (undefined for rn == pc). */
7047 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7048 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7049
7050 case 0x2:
7051 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7052
7053 case 0x4: case 0x5: case 0x6: case 0x7:
7054 /* ldc/ldc2 lit (undefined for rn != pc). */
7055 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7056 : arm_copy_undef (gdbarch, insn, dsc);
7057
7058 default:
7059 return arm_copy_undef (gdbarch, insn, dsc);
7060 }
7061 }
7062
7063 case 0xa:
7064 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7065
7066 case 0xb:
7067 if (bits (insn, 16, 19) == 0xf)
7068 /* ldc/ldc2 lit. */
7069 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7070 else
7071 return arm_copy_undef (gdbarch, insn, dsc);
7072
7073 case 0xc:
7074 if (bit (insn, 4))
7075 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7076 else
7077 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7078
7079 case 0xd:
7080 if (bit (insn, 4))
7081 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7082 else
7083 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7084
7085 default:
7086 return arm_copy_undef (gdbarch, insn, dsc);
7087 }
7088 }
7089
7090 /* Decode miscellaneous instructions in dp/misc encoding space. */
7091
7092 static int
7093 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7094 struct regcache *regs,
7095 arm_displaced_step_copy_insn_closure *dsc)
7096 {
7097 unsigned int op2 = bits (insn, 4, 6);
7098 unsigned int op = bits (insn, 21, 22);
7099
7100 switch (op2)
7101 {
7102 case 0x0:
7103 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7104
7105 case 0x1:
7106 if (op == 0x1) /* bx. */
7107 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7108 else if (op == 0x3)
7109 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7110 else
7111 return arm_copy_undef (gdbarch, insn, dsc);
7112
7113 case 0x2:
7114 if (op == 0x1)
7115 /* Not really supported. */
7116 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7117 else
7118 return arm_copy_undef (gdbarch, insn, dsc);
7119
7120 case 0x3:
7121 if (op == 0x1)
7122 return arm_copy_bx_blx_reg (gdbarch, insn,
7123 regs, dsc); /* blx register. */
7124 else
7125 return arm_copy_undef (gdbarch, insn, dsc);
7126
7127 case 0x5:
7128 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7129
7130 case 0x7:
7131 if (op == 0x1)
7132 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7133 else if (op == 0x3)
7134 /* Not really supported. */
7135 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7136 /* Fall through. */
7137
7138 default:
7139 return arm_copy_undef (gdbarch, insn, dsc);
7140 }
7141 }
7142
7143 static int
7144 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7145 struct regcache *regs,
7146 arm_displaced_step_copy_insn_closure *dsc)
7147 {
7148 if (bit (insn, 25))
7149 switch (bits (insn, 20, 24))
7150 {
7151 case 0x10:
7152 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7153
7154 case 0x14:
7155 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7156
7157 case 0x12: case 0x16:
7158 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7159
7160 default:
7161 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7162 }
7163 else
7164 {
7165 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7166
7167 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7168 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7169 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7170 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7171 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7172 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7173 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7174 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7175 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7176 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7177 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7178 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7179 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7180 /* 2nd arg means "unprivileged". */
7181 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7182 dsc);
7183 }
7184
7185 /* Should be unreachable. */
7186 return 1;
7187 }
7188
7189 static int
7190 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7191 struct regcache *regs,
7192 arm_displaced_step_copy_insn_closure *dsc)
7193 {
7194 int a = bit (insn, 25), b = bit (insn, 4);
7195 uint32_t op1 = bits (insn, 20, 24);
7196
7197 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7198 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7199 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7200 else if ((!a && (op1 & 0x17) == 0x02)
7201 || (a && (op1 & 0x17) == 0x02 && !b))
7202 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7203 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7204 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7205 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7206 else if ((!a && (op1 & 0x17) == 0x03)
7207 || (a && (op1 & 0x17) == 0x03 && !b))
7208 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7209 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7210 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7211 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7212 else if ((!a && (op1 & 0x17) == 0x06)
7213 || (a && (op1 & 0x17) == 0x06 && !b))
7214 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7215 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7216 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7217 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7218 else if ((!a && (op1 & 0x17) == 0x07)
7219 || (a && (op1 & 0x17) == 0x07 && !b))
7220 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7221
7222 /* Should be unreachable. */
7223 return 1;
7224 }
7225
7226 static int
7227 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7228 arm_displaced_step_copy_insn_closure *dsc)
7229 {
7230 switch (bits (insn, 20, 24))
7231 {
7232 case 0x00: case 0x01: case 0x02: case 0x03:
7233 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7234
7235 case 0x04: case 0x05: case 0x06: case 0x07:
7236 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7237
7238 case 0x08: case 0x09: case 0x0a: case 0x0b:
7239 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7240 return arm_copy_unmodified (gdbarch, insn,
7241 "decode/pack/unpack/saturate/reverse", dsc);
7242
7243 case 0x18:
7244 if (bits (insn, 5, 7) == 0) /* op2. */
7245 {
7246 if (bits (insn, 12, 15) == 0xf)
7247 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7248 else
7249 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7250 }
7251 else
7252 return arm_copy_undef (gdbarch, insn, dsc);
7253
7254 case 0x1a: case 0x1b:
7255 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7256 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7257 else
7258 return arm_copy_undef (gdbarch, insn, dsc);
7259
7260 case 0x1c: case 0x1d:
7261 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7262 {
7263 if (bits (insn, 0, 3) == 0xf)
7264 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7265 else
7266 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7267 }
7268 else
7269 return arm_copy_undef (gdbarch, insn, dsc);
7270
7271 case 0x1e: case 0x1f:
7272 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7273 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7274 else
7275 return arm_copy_undef (gdbarch, insn, dsc);
7276 }
7277
7278 /* Should be unreachable. */
7279 return 1;
7280 }
7281
7282 static int
7283 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7284 struct regcache *regs,
7285 arm_displaced_step_copy_insn_closure *dsc)
7286 {
7287 if (bit (insn, 25))
7288 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7289 else
7290 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7291 }
7292
7293 static int
7294 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7295 struct regcache *regs,
7296 arm_displaced_step_copy_insn_closure *dsc)
7297 {
7298 unsigned int opcode = bits (insn, 20, 24);
7299
7300 switch (opcode)
7301 {
7302 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7303 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7304
7305 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7306 case 0x12: case 0x16:
7307 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7308
7309 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7310 case 0x13: case 0x17:
7311 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7312
7313 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7314 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7315 /* Note: no writeback for these instructions. Bit 25 will always be
7316 zero though (via caller), so the following works OK. */
7317 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7318 }
7319
7320 /* Should be unreachable. */
7321 return 1;
7322 }
7323
7324 /* Decode shifted register instructions. */
7325
7326 static int
7327 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7328 uint16_t insn2, struct regcache *regs,
7329 arm_displaced_step_copy_insn_closure *dsc)
7330 {
7331 /* PC is only allowed to be used in instruction MOV. */
7332
7333 unsigned int op = bits (insn1, 5, 8);
7334 unsigned int rn = bits (insn1, 0, 3);
7335
7336 if (op == 0x2 && rn == 0xf) /* MOV */
7337 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7338 else
7339 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7340 "dp (shift reg)", dsc);
7341 }
7342
7343
7344 /* Decode extension register load/store. Exactly the same as
7345 arm_decode_ext_reg_ld_st. */
7346
7347 static int
7348 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7349 uint16_t insn2, struct regcache *regs,
7350 arm_displaced_step_copy_insn_closure *dsc)
7351 {
7352 unsigned int opcode = bits (insn1, 4, 8);
7353
7354 switch (opcode)
7355 {
7356 case 0x04: case 0x05:
7357 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7358 "vfp/neon vmov", dsc);
7359
7360 case 0x08: case 0x0c: /* 01x00 */
7361 case 0x0a: case 0x0e: /* 01x10 */
7362 case 0x12: case 0x16: /* 10x10 */
7363 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7364 "vfp/neon vstm/vpush", dsc);
7365
7366 case 0x09: case 0x0d: /* 01x01 */
7367 case 0x0b: case 0x0f: /* 01x11 */
7368 case 0x13: case 0x17: /* 10x11 */
7369 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7370 "vfp/neon vldm/vpop", dsc);
7371
7372 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7373 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7374 "vstr", dsc);
7375 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7376 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7377 }
7378
7379 /* Should be unreachable. */
7380 return 1;
7381 }
7382
7383 static int
7384 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7385 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7386 {
7387 unsigned int op1 = bits (insn, 20, 25);
7388 int op = bit (insn, 4);
7389 unsigned int coproc = bits (insn, 8, 11);
7390
7391 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7392 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7393 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7394 && (coproc & 0xe) != 0xa)
7395 /* stc/stc2. */
7396 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7397 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7398 && (coproc & 0xe) != 0xa)
7399 /* ldc/ldc2 imm/lit. */
7400 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7401 else if ((op1 & 0x3e) == 0x00)
7402 return arm_copy_undef (gdbarch, insn, dsc);
7403 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7404 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7405 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7406 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7407 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7408 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7409 else if ((op1 & 0x30) == 0x20 && !op)
7410 {
7411 if ((coproc & 0xe) == 0xa)
7412 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7413 else
7414 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7415 }
7416 else if ((op1 & 0x30) == 0x20 && op)
7417 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7418 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7419 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7420 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7421 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7422 else if ((op1 & 0x30) == 0x30)
7423 return arm_copy_svc (gdbarch, insn, regs, dsc);
7424 else
7425 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7426 }
7427
7428 static int
7429 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7430 uint16_t insn2, struct regcache *regs,
7431 arm_displaced_step_copy_insn_closure *dsc)
7432 {
7433 unsigned int coproc = bits (insn2, 8, 11);
7434 unsigned int bit_5_8 = bits (insn1, 5, 8);
7435 unsigned int bit_9 = bit (insn1, 9);
7436 unsigned int bit_4 = bit (insn1, 4);
7437
7438 if (bit_9 == 0)
7439 {
7440 if (bit_5_8 == 2)
7441 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7443 dsc);
7444 else if (bit_5_8 == 0) /* UNDEFINED. */
7445 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7446 else
7447 {
7448 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7449 if ((coproc & 0xe) == 0xa)
7450 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7451 dsc);
7452 else /* coproc is not 101x. */
7453 {
7454 if (bit_4 == 0) /* STC/STC2. */
7455 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7456 "stc/stc2", dsc);
7457 else /* LDC/LDC2 {literal, immediate}. */
7458 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7459 regs, dsc);
7460 }
7461 }
7462 }
7463 else
7464 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7465
7466 return 0;
7467 }
7468
7469 static void
7470 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7471 arm_displaced_step_copy_insn_closure *dsc, int rd)
7472 {
7473 /* ADR Rd, #imm
7474
7475 Rewrite as:
7476
7477 Preparation: Rd <- PC
7478 Insn: ADD Rd, #imm
7479 Cleanup: Null.
7480 */
7481
7482 /* Rd <- PC */
7483 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7484 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7485 }
7486
7487 static int
7488 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7489 arm_displaced_step_copy_insn_closure *dsc,
7490 int rd, unsigned int imm)
7491 {
7492
7493 /* Encoding T2: ADDS Rd, #imm */
7494 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7495
7496 install_pc_relative (gdbarch, regs, dsc, rd);
7497
7498 return 0;
7499 }
7500
7501 static int
7502 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7503 struct regcache *regs,
7504 arm_displaced_step_copy_insn_closure *dsc)
7505 {
7506 unsigned int rd = bits (insn, 8, 10);
7507 unsigned int imm8 = bits (insn, 0, 7);
7508
7509 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7510 rd, imm8, insn);
7511
7512 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7513 }
7514
7515 static int
7516 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7517 uint16_t insn2, struct regcache *regs,
7518 arm_displaced_step_copy_insn_closure *dsc)
7519 {
7520 unsigned int rd = bits (insn2, 8, 11);
7521 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
7522 extract raw immediate encoding rather than computing immediate. When
7523 generating ADD or SUB instruction, we can simply perform OR operation to
7524 set immediate into ADD. */
7525 unsigned int imm_3_8 = insn2 & 0x70ff;
7526 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7527
7528 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7529 rd, imm_i, imm_3_8, insn1, insn2);
7530
7531 if (bit (insn1, 7)) /* Encoding T2 */
7532 {
7533 /* Encoding T3: SUB Rd, Rd, #imm */
7534 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7535 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7536 }
7537 else /* Encoding T3 */
7538 {
7539 /* Encoding T3: ADD Rd, Rd, #imm */
7540 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7541 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7542 }
7543 dsc->numinsns = 2;
7544
7545 install_pc_relative (gdbarch, regs, dsc, rd);
7546
7547 return 0;
7548 }
7549
7550 static int
7551 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7552 struct regcache *regs,
7553 arm_displaced_step_copy_insn_closure *dsc)
7554 {
7555 unsigned int rt = bits (insn1, 8, 10);
7556 unsigned int pc;
7557 int imm8 = (bits (insn1, 0, 7) << 2);
7558
7559 /* LDR Rd, #imm8
7560
7561 Rwrite as:
7562
7563 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7564
7565 Insn: LDR R0, [R2, R3];
7566 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7567
7568 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7569
7570 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7571 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7572 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7573 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7574 /* The assembler calculates the required value of the offset from the
7575 Align(PC,4) value of this instruction to the label. */
7576 pc = pc & 0xfffffffc;
7577
7578 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7579 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7580
7581 dsc->rd = rt;
7582 dsc->u.ldst.xfersize = 4;
7583 dsc->u.ldst.rn = 0;
7584 dsc->u.ldst.immed = 0;
7585 dsc->u.ldst.writeback = 0;
7586 dsc->u.ldst.restore_r4 = 0;
7587
7588 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7589
7590 dsc->cleanup = &cleanup_load;
7591
7592 return 0;
7593 }
7594
7595 /* Copy Thumb cbnz/cbz instruction. */
7596
7597 static int
7598 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7599 struct regcache *regs,
7600 arm_displaced_step_copy_insn_closure *dsc)
7601 {
7602 int non_zero = bit (insn1, 11);
7603 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7604 CORE_ADDR from = dsc->insn_addr;
7605 int rn = bits (insn1, 0, 2);
7606 int rn_val = displaced_read_reg (regs, dsc, rn);
7607
7608 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7609 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7610 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7611 condition is false, let it be, cleanup_branch will do nothing. */
7612 if (dsc->u.branch.cond)
7613 {
7614 dsc->u.branch.cond = INST_AL;
7615 dsc->u.branch.dest = from + 4 + imm5;
7616 }
7617 else
7618 dsc->u.branch.dest = from + 2;
7619
7620 dsc->u.branch.link = 0;
7621 dsc->u.branch.exchange = 0;
7622
7623 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7624 non_zero ? "cbnz" : "cbz",
7625 rn, rn_val, insn1, dsc->u.branch.dest);
7626
7627 dsc->modinsn[0] = THUMB_NOP;
7628
7629 dsc->cleanup = &cleanup_branch;
7630 return 0;
7631 }
7632
7633 /* Copy Table Branch Byte/Halfword */
7634 static int
7635 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7636 uint16_t insn2, struct regcache *regs,
7637 arm_displaced_step_copy_insn_closure *dsc)
7638 {
7639 ULONGEST rn_val, rm_val;
7640 int is_tbh = bit (insn2, 4);
7641 CORE_ADDR halfwords = 0;
7642 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7643
7644 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7645 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7646
7647 if (is_tbh)
7648 {
7649 gdb_byte buf[2];
7650
7651 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7652 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7653 }
7654 else
7655 {
7656 gdb_byte buf[1];
7657
7658 target_read_memory (rn_val + rm_val, buf, 1);
7659 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7660 }
7661
7662 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
7663 is_tbh ? "tbh" : "tbb",
7664 (unsigned int) rn_val, (unsigned int) rm_val,
7665 (unsigned int) halfwords);
7666
7667 dsc->u.branch.cond = INST_AL;
7668 dsc->u.branch.link = 0;
7669 dsc->u.branch.exchange = 0;
7670 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7671
7672 dsc->cleanup = &cleanup_branch;
7673
7674 return 0;
7675 }
7676
7677 static void
7678 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7679 arm_displaced_step_copy_insn_closure *dsc)
7680 {
7681 /* PC <- r7 */
7682 int val = displaced_read_reg (regs, dsc, 7);
7683 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7684
7685 /* r7 <- r8 */
7686 val = displaced_read_reg (regs, dsc, 8);
7687 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7688
7689 /* r8 <- tmp[0] */
7690 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7691
7692 }
7693
7694 static int
7695 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7696 struct regcache *regs,
7697 arm_displaced_step_copy_insn_closure *dsc)
7698 {
7699 dsc->u.block.regmask = insn1 & 0x00ff;
7700
7701 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7702 to :
7703
7704 (1) register list is full, that is, r0-r7 are used.
7705 Prepare: tmp[0] <- r8
7706
7707 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7708 MOV r8, r7; Move value of r7 to r8;
7709 POP {r7}; Store PC value into r7.
7710
7711 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7712
7713 (2) register list is not full, supposing there are N registers in
7714 register list (except PC, 0 <= N <= 7).
7715 Prepare: for each i, 0 - N, tmp[i] <- ri.
7716
7717 POP {r0, r1, ...., rN};
7718
7719 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7720 from tmp[] properly.
7721 */
7722 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7723 dsc->u.block.regmask, insn1);
7724
7725 if (dsc->u.block.regmask == 0xff)
7726 {
7727 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7728
7729 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7730 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7731 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7732
7733 dsc->numinsns = 3;
7734 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7735 }
7736 else
7737 {
7738 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7739 unsigned int i;
7740 unsigned int new_regmask;
7741
7742 for (i = 0; i < num_in_list + 1; i++)
7743 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7744
7745 new_regmask = (1 << (num_in_list + 1)) - 1;
7746
7747 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7748 "modified list %.4x",
7749 (int) dsc->u.block.regmask, new_regmask);
7750
7751 dsc->u.block.regmask |= 0x8000;
7752 dsc->u.block.writeback = 0;
7753 dsc->u.block.cond = INST_AL;
7754
7755 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7756
7757 dsc->cleanup = &cleanup_block_load_pc;
7758 }
7759
7760 return 0;
7761 }
7762
7763 static void
7764 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7765 struct regcache *regs,
7766 arm_displaced_step_copy_insn_closure *dsc)
7767 {
7768 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7769 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7770 int err = 0;
7771
7772 /* 16-bit thumb instructions. */
7773 switch (op_bit_12_15)
7774 {
7775 /* Shift (imme), add, subtract, move and compare. */
7776 case 0: case 1: case 2: case 3:
7777 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7778 "shift/add/sub/mov/cmp",
7779 dsc);
7780 break;
7781 case 4:
7782 switch (op_bit_10_11)
7783 {
7784 case 0: /* Data-processing */
7785 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7786 "data-processing",
7787 dsc);
7788 break;
7789 case 1: /* Special data instructions and branch and exchange. */
7790 {
7791 unsigned short op = bits (insn1, 7, 9);
7792 if (op == 6 || op == 7) /* BX or BLX */
7793 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7794 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7795 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7796 else
7797 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7798 dsc);
7799 }
7800 break;
7801 default: /* LDR (literal) */
7802 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7803 }
7804 break;
7805 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7806 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7807 break;
7808 case 10:
7809 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7810 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7811 else /* Generate SP-relative address */
7812 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7813 break;
7814 case 11: /* Misc 16-bit instructions */
7815 {
7816 switch (bits (insn1, 8, 11))
7817 {
7818 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7819 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7820 break;
7821 case 12: case 13: /* POP */
7822 if (bit (insn1, 8)) /* PC is in register list. */
7823 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7824 else
7825 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7826 break;
7827 case 15: /* If-Then, and hints */
7828 if (bits (insn1, 0, 3))
7829 /* If-Then makes up to four following instructions conditional.
7830 IT instruction itself is not conditional, so handle it as a
7831 common unmodified instruction. */
7832 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7833 dsc);
7834 else
7835 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7836 break;
7837 default:
7838 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7839 }
7840 }
7841 break;
7842 case 12:
7843 if (op_bit_10_11 < 2) /* Store multiple registers */
7844 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7845 else /* Load multiple registers */
7846 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7847 break;
7848 case 13: /* Conditional branch and supervisor call */
7849 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7850 err = thumb_copy_b (gdbarch, insn1, dsc);
7851 else
7852 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7853 break;
7854 case 14: /* Unconditional branch */
7855 err = thumb_copy_b (gdbarch, insn1, dsc);
7856 break;
7857 default:
7858 err = 1;
7859 }
7860
7861 if (err)
7862 internal_error (__FILE__, __LINE__,
7863 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7864 }
7865
7866 static int
7867 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7868 uint16_t insn1, uint16_t insn2,
7869 struct regcache *regs,
7870 arm_displaced_step_copy_insn_closure *dsc)
7871 {
7872 int rt = bits (insn2, 12, 15);
7873 int rn = bits (insn1, 0, 3);
7874 int op1 = bits (insn1, 7, 8);
7875
7876 switch (bits (insn1, 5, 6))
7877 {
7878 case 0: /* Load byte and memory hints */
7879 if (rt == 0xf) /* PLD/PLI */
7880 {
7881 if (rn == 0xf)
7882 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7883 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7884 else
7885 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7886 "pli/pld", dsc);
7887 }
7888 else
7889 {
7890 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7891 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7892 1);
7893 else
7894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7895 "ldrb{reg, immediate}/ldrbt",
7896 dsc);
7897 }
7898
7899 break;
7900 case 1: /* Load halfword and memory hints. */
7901 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7902 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7903 "pld/unalloc memhint", dsc);
7904 else
7905 {
7906 if (rn == 0xf)
7907 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7908 2);
7909 else
7910 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7911 "ldrh/ldrht", dsc);
7912 }
7913 break;
7914 case 2: /* Load word */
7915 {
7916 int insn2_bit_8_11 = bits (insn2, 8, 11);
7917
7918 if (rn == 0xf)
7919 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7920 else if (op1 == 0x1) /* Encoding T3 */
7921 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7922 0, 1);
7923 else /* op1 == 0x0 */
7924 {
7925 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7926 /* LDR (immediate) */
7927 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7928 dsc, bit (insn2, 8), 1);
7929 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7930 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7931 "ldrt", dsc);
7932 else
7933 /* LDR (register) */
7934 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7935 dsc, 0, 0);
7936 }
7937 break;
7938 }
7939 default:
7940 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7941 break;
7942 }
7943 return 0;
7944 }
7945
7946 static void
7947 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7948 uint16_t insn2, struct regcache *regs,
7949 arm_displaced_step_copy_insn_closure *dsc)
7950 {
7951 int err = 0;
7952 unsigned short op = bit (insn2, 15);
7953 unsigned int op1 = bits (insn1, 11, 12);
7954
7955 switch (op1)
7956 {
7957 case 1:
7958 {
7959 switch (bits (insn1, 9, 10))
7960 {
7961 case 0:
7962 if (bit (insn1, 6))
7963 {
7964 /* Load/store {dual, exclusive}, table branch. */
7965 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7966 && bits (insn2, 5, 7) == 0)
7967 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7968 dsc);
7969 else
7970 /* PC is not allowed to use in load/store {dual, exclusive}
7971 instructions. */
7972 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7973 "load/store dual/ex", dsc);
7974 }
7975 else /* load/store multiple */
7976 {
7977 switch (bits (insn1, 7, 8))
7978 {
7979 case 0: case 3: /* SRS, RFE */
7980 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7981 "srs/rfe", dsc);
7982 break;
7983 case 1: case 2: /* LDM/STM/PUSH/POP */
7984 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7985 break;
7986 }
7987 }
7988 break;
7989
7990 case 1:
7991 /* Data-processing (shift register). */
7992 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7993 dsc);
7994 break;
7995 default: /* Coprocessor instructions. */
7996 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7997 break;
7998 }
7999 break;
8000 }
8001 case 2: /* op1 = 2 */
8002 if (op) /* Branch and misc control. */
8003 {
8004 if (bit (insn2, 14) /* BLX/BL */
8005 || bit (insn2, 12) /* Unconditional branch */
8006 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8007 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8008 else
8009 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8010 "misc ctrl", dsc);
8011 }
8012 else
8013 {
8014 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8015 {
8016 int dp_op = bits (insn1, 4, 8);
8017 int rn = bits (insn1, 0, 3);
8018 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8019 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8020 regs, dsc);
8021 else
8022 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8023 "dp/pb", dsc);
8024 }
8025 else /* Data processing (modified immediate) */
8026 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8027 "dp/mi", dsc);
8028 }
8029 break;
8030 case 3: /* op1 = 3 */
8031 switch (bits (insn1, 9, 10))
8032 {
8033 case 0:
8034 if (bit (insn1, 4))
8035 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8036 regs, dsc);
8037 else /* NEON Load/Store and Store single data item */
8038 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8039 "neon elt/struct load/store",
8040 dsc);
8041 break;
8042 case 1: /* op1 = 3, bits (9, 10) == 1 */
8043 switch (bits (insn1, 7, 8))
8044 {
8045 case 0: case 1: /* Data processing (register) */
8046 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8047 "dp(reg)", dsc);
8048 break;
8049 case 2: /* Multiply and absolute difference */
8050 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8051 "mul/mua/diff", dsc);
8052 break;
8053 case 3: /* Long multiply and divide */
8054 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8055 "lmul/lmua", dsc);
8056 break;
8057 }
8058 break;
8059 default: /* Coprocessor instructions */
8060 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8061 break;
8062 }
8063 break;
8064 default:
8065 err = 1;
8066 }
8067
8068 if (err)
8069 internal_error (__FILE__, __LINE__,
8070 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8071
8072 }
8073
8074 static void
8075 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8076 struct regcache *regs,
8077 arm_displaced_step_copy_insn_closure *dsc)
8078 {
8079 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8080 uint16_t insn1
8081 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8082
8083 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8084 insn1, (unsigned long) from);
8085
8086 dsc->is_thumb = 1;
8087 dsc->insn_size = thumb_insn_size (insn1);
8088 if (thumb_insn_size (insn1) == 4)
8089 {
8090 uint16_t insn2
8091 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8092 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8093 }
8094 else
8095 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8096 }
8097
8098 void
8099 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8100 CORE_ADDR to, struct regcache *regs,
8101 arm_displaced_step_copy_insn_closure *dsc)
8102 {
8103 int err = 0;
8104 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8105 uint32_t insn;
8106
8107 /* Most displaced instructions use a 1-instruction scratch space, so set this
8108 here and override below if/when necessary. */
8109 dsc->numinsns = 1;
8110 dsc->insn_addr = from;
8111 dsc->scratch_base = to;
8112 dsc->cleanup = NULL;
8113 dsc->wrote_to_pc = 0;
8114
8115 if (!displaced_in_arm_mode (regs))
8116 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8117
8118 dsc->is_thumb = 0;
8119 dsc->insn_size = 4;
8120 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8121 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8122 (unsigned long) insn, (unsigned long) from);
8123
8124 if ((insn & 0xf0000000) == 0xf0000000)
8125 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8126 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8127 {
8128 case 0x0: case 0x1: case 0x2: case 0x3:
8129 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8130 break;
8131
8132 case 0x4: case 0x5: case 0x6:
8133 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8134 break;
8135
8136 case 0x7:
8137 err = arm_decode_media (gdbarch, insn, dsc);
8138 break;
8139
8140 case 0x8: case 0x9: case 0xa: case 0xb:
8141 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8142 break;
8143
8144 case 0xc: case 0xd: case 0xe: case 0xf:
8145 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8146 break;
8147 }
8148
8149 if (err)
8150 internal_error (__FILE__, __LINE__,
8151 _("arm_process_displaced_insn: Instruction decode error"));
8152 }
8153
8154 /* Actually set up the scratch space for a displaced instruction. */
8155
8156 void
8157 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8158 CORE_ADDR to,
8159 arm_displaced_step_copy_insn_closure *dsc)
8160 {
8161 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8162 unsigned int i, len, offset;
8163 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8164 int size = dsc->is_thumb? 2 : 4;
8165 const gdb_byte *bkp_insn;
8166
8167 offset = 0;
8168 /* Poke modified instruction(s). */
8169 for (i = 0; i < dsc->numinsns; i++)
8170 {
8171 if (size == 4)
8172 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8173 dsc->modinsn[i], (unsigned long) to + offset);
8174 else if (size == 2)
8175 displaced_debug_printf ("writing insn %.4x at %.8lx",
8176 (unsigned short) dsc->modinsn[i],
8177 (unsigned long) to + offset);
8178
8179 write_memory_unsigned_integer (to + offset, size,
8180 byte_order_for_code,
8181 dsc->modinsn[i]);
8182 offset += size;
8183 }
8184
8185 /* Choose the correct breakpoint instruction. */
8186 if (dsc->is_thumb)
8187 {
8188 bkp_insn = tdep->thumb_breakpoint;
8189 len = tdep->thumb_breakpoint_size;
8190 }
8191 else
8192 {
8193 bkp_insn = tdep->arm_breakpoint;
8194 len = tdep->arm_breakpoint_size;
8195 }
8196
8197 /* Put breakpoint afterwards. */
8198 write_memory (to + offset, bkp_insn, len);
8199
8200 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8201 paddress (gdbarch, to));
8202 }
8203
8204 /* Entry point for cleaning things up after a displaced instruction has been
8205 single-stepped. */
8206
8207 void
8208 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8209 struct displaced_step_copy_insn_closure *dsc_,
8210 CORE_ADDR from, CORE_ADDR to,
8211 struct regcache *regs)
8212 {
8213 arm_displaced_step_copy_insn_closure *dsc
8214 = (arm_displaced_step_copy_insn_closure *) dsc_;
8215
8216 if (dsc->cleanup)
8217 dsc->cleanup (gdbarch, regs, dsc);
8218
8219 if (!dsc->wrote_to_pc)
8220 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8221 dsc->insn_addr + dsc->insn_size);
8222
8223 }
8224
8225 #include "bfd-in2.h"
8226 #include "libcoff.h"
8227
8228 static int
8229 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8230 {
8231 gdb_disassembler *di
8232 = static_cast<gdb_disassembler *>(info->application_data);
8233 struct gdbarch *gdbarch = di->arch ();
8234
8235 if (arm_pc_is_thumb (gdbarch, memaddr))
8236 {
8237 static asymbol *asym;
8238 static combined_entry_type ce;
8239 static struct coff_symbol_struct csym;
8240 static struct bfd fake_bfd;
8241 static bfd_target fake_target;
8242
8243 if (csym.native == NULL)
8244 {
8245 /* Create a fake symbol vector containing a Thumb symbol.
8246 This is solely so that the code in print_insn_little_arm()
8247 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8248 the presence of a Thumb symbol and switch to decoding
8249 Thumb instructions. */
8250
8251 fake_target.flavour = bfd_target_coff_flavour;
8252 fake_bfd.xvec = &fake_target;
8253 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8254 csym.native = &ce;
8255 csym.symbol.the_bfd = &fake_bfd;
8256 csym.symbol.name = "fake";
8257 asym = (asymbol *) & csym;
8258 }
8259
8260 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8261 info->symbols = &asym;
8262 }
8263 else
8264 info->symbols = NULL;
8265
8266 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
8267 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
8268 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
8269 the assert on the mismatch of info->mach and
8270 bfd_get_mach (current_program_space->exec_bfd ()) in
8271 default_print_insn. */
8272 if (current_program_space->exec_bfd () != NULL
8273 && (current_program_space->exec_bfd ()->arch_info
8274 == gdbarch_bfd_arch_info (gdbarch)))
8275 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8276
8277 return default_print_insn (memaddr, info);
8278 }
8279
8280 /* The following define instruction sequences that will cause ARM
8281 cpu's to take an undefined instruction trap. These are used to
8282 signal a breakpoint to GDB.
8283
8284 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8285 modes. A different instruction is required for each mode. The ARM
8286 cpu's can also be big or little endian. Thus four different
8287 instructions are needed to support all cases.
8288
8289 Note: ARMv4 defines several new instructions that will take the
8290 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8291 not in fact add the new instructions. The new undefined
8292 instructions in ARMv4 are all instructions that had no defined
8293 behaviour in earlier chips. There is no guarantee that they will
8294 raise an exception, but may be treated as NOP's. In practice, it
8295 may only safe to rely on instructions matching:
8296
8297 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8298 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8299 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8300
8301 Even this may only true if the condition predicate is true. The
8302 following use a condition predicate of ALWAYS so it is always TRUE.
8303
8304 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8305 and NetBSD all use a software interrupt rather than an undefined
8306 instruction to force a trap. This can be handled by by the
8307 abi-specific code during establishment of the gdbarch vector. */
8308
8309 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8310 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8311 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8312 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8313
8314 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8315 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8316 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8317 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8318
8319 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8320
8321 static int
8322 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8323 {
8324 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8325 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8326
8327 if (arm_pc_is_thumb (gdbarch, *pcptr))
8328 {
8329 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8330
8331 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8332 check whether we are replacing a 32-bit instruction. */
8333 if (tdep->thumb2_breakpoint != NULL)
8334 {
8335 gdb_byte buf[2];
8336
8337 if (target_read_memory (*pcptr, buf, 2) == 0)
8338 {
8339 unsigned short inst1;
8340
8341 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8342 if (thumb_insn_size (inst1) == 4)
8343 return ARM_BP_KIND_THUMB2;
8344 }
8345 }
8346
8347 return ARM_BP_KIND_THUMB;
8348 }
8349 else
8350 return ARM_BP_KIND_ARM;
8351
8352 }
8353
8354 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8355
8356 static const gdb_byte *
8357 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8358 {
8359 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8360
8361 switch (kind)
8362 {
8363 case ARM_BP_KIND_ARM:
8364 *size = tdep->arm_breakpoint_size;
8365 return tdep->arm_breakpoint;
8366 case ARM_BP_KIND_THUMB:
8367 *size = tdep->thumb_breakpoint_size;
8368 return tdep->thumb_breakpoint;
8369 case ARM_BP_KIND_THUMB2:
8370 *size = tdep->thumb2_breakpoint_size;
8371 return tdep->thumb2_breakpoint;
8372 default:
8373 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8374 }
8375 }
8376
8377 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8378
8379 static int
8380 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8381 struct regcache *regcache,
8382 CORE_ADDR *pcptr)
8383 {
8384 gdb_byte buf[4];
8385
8386 /* Check the memory pointed by PC is readable. */
8387 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8388 {
8389 struct arm_get_next_pcs next_pcs_ctx;
8390
8391 arm_get_next_pcs_ctor (&next_pcs_ctx,
8392 &arm_get_next_pcs_ops,
8393 gdbarch_byte_order (gdbarch),
8394 gdbarch_byte_order_for_code (gdbarch),
8395 0,
8396 regcache);
8397
8398 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8399
8400 /* If MEMADDR is the next instruction of current pc, do the
8401 software single step computation, and get the thumb mode by
8402 the destination address. */
8403 for (CORE_ADDR pc : next_pcs)
8404 {
8405 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8406 {
8407 if (IS_THUMB_ADDR (pc))
8408 {
8409 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8410 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8411 }
8412 else
8413 return ARM_BP_KIND_ARM;
8414 }
8415 }
8416 }
8417
8418 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8419 }
8420
8421 /* Extract from an array REGBUF containing the (raw) register state a
8422 function return value of type TYPE, and copy that, in virtual
8423 format, into VALBUF. */
8424
8425 static void
8426 arm_extract_return_value (struct type *type, struct regcache *regs,
8427 gdb_byte *valbuf)
8428 {
8429 struct gdbarch *gdbarch = regs->arch ();
8430 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8431 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8432
8433 if (TYPE_CODE_FLT == type->code ())
8434 {
8435 switch (tdep->fp_model)
8436 {
8437 case ARM_FLOAT_FPA:
8438 {
8439 /* The value is in register F0 in internal format. We need to
8440 extract the raw value and then convert it to the desired
8441 internal type. */
8442 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8443
8444 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8445 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8446 valbuf, type);
8447 }
8448 break;
8449
8450 case ARM_FLOAT_SOFT_FPA:
8451 case ARM_FLOAT_SOFT_VFP:
8452 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8453 not using the VFP ABI code. */
8454 case ARM_FLOAT_VFP:
8455 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8456 if (TYPE_LENGTH (type) > 4)
8457 regs->cooked_read (ARM_A1_REGNUM + 1,
8458 valbuf + ARM_INT_REGISTER_SIZE);
8459 break;
8460
8461 default:
8462 internal_error (__FILE__, __LINE__,
8463 _("arm_extract_return_value: "
8464 "Floating point model not supported"));
8465 break;
8466 }
8467 }
8468 else if (type->code () == TYPE_CODE_INT
8469 || type->code () == TYPE_CODE_CHAR
8470 || type->code () == TYPE_CODE_BOOL
8471 || type->code () == TYPE_CODE_PTR
8472 || TYPE_IS_REFERENCE (type)
8473 || type->code () == TYPE_CODE_ENUM
8474 || is_fixed_point_type (type))
8475 {
8476 /* If the type is a plain integer, then the access is
8477 straight-forward. Otherwise we have to play around a bit
8478 more. */
8479 int len = TYPE_LENGTH (type);
8480 int regno = ARM_A1_REGNUM;
8481 ULONGEST tmp;
8482
8483 while (len > 0)
8484 {
8485 /* By using store_unsigned_integer we avoid having to do
8486 anything special for small big-endian values. */
8487 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8488 store_unsigned_integer (valbuf,
8489 (len > ARM_INT_REGISTER_SIZE
8490 ? ARM_INT_REGISTER_SIZE : len),
8491 byte_order, tmp);
8492 len -= ARM_INT_REGISTER_SIZE;
8493 valbuf += ARM_INT_REGISTER_SIZE;
8494 }
8495 }
8496 else
8497 {
8498 /* For a structure or union the behaviour is as if the value had
8499 been stored to word-aligned memory and then loaded into
8500 registers with 32-bit load instruction(s). */
8501 int len = TYPE_LENGTH (type);
8502 int regno = ARM_A1_REGNUM;
8503 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8504
8505 while (len > 0)
8506 {
8507 regs->cooked_read (regno++, tmpbuf);
8508 memcpy (valbuf, tmpbuf,
8509 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8510 len -= ARM_INT_REGISTER_SIZE;
8511 valbuf += ARM_INT_REGISTER_SIZE;
8512 }
8513 }
8514 }
8515
8516
8517 /* Will a function return an aggregate type in memory or in a
8518 register? Return 0 if an aggregate type can be returned in a
8519 register, 1 if it must be returned in memory. */
8520
8521 static int
8522 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8523 {
8524 enum type_code code;
8525
8526 type = check_typedef (type);
8527
8528 /* Simple, non-aggregate types (ie not including vectors and
8529 complex) are always returned in a register (or registers). */
8530 code = type->code ();
8531 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8532 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8533 return 0;
8534
8535 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8536 {
8537 /* Vector values should be returned using ARM registers if they
8538 are not over 16 bytes. */
8539 return (TYPE_LENGTH (type) > 16);
8540 }
8541
8542 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8543 if (tdep->arm_abi != ARM_ABI_APCS)
8544 {
8545 /* The AAPCS says all aggregates not larger than a word are returned
8546 in a register. */
8547 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE
8548 && language_pass_by_reference (type).trivially_copyable)
8549 return 0;
8550
8551 return 1;
8552 }
8553 else
8554 {
8555 int nRc;
8556
8557 /* All aggregate types that won't fit in a register must be returned
8558 in memory. */
8559 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE
8560 || !language_pass_by_reference (type).trivially_copyable)
8561 return 1;
8562
8563 /* In the ARM ABI, "integer" like aggregate types are returned in
8564 registers. For an aggregate type to be integer like, its size
8565 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8566 offset of each addressable subfield must be zero. Note that bit
8567 fields are not addressable, and all addressable subfields of
8568 unions always start at offset zero.
8569
8570 This function is based on the behaviour of GCC 2.95.1.
8571 See: gcc/arm.c: arm_return_in_memory() for details.
8572
8573 Note: All versions of GCC before GCC 2.95.2 do not set up the
8574 parameters correctly for a function returning the following
8575 structure: struct { float f;}; This should be returned in memory,
8576 not a register. Richard Earnshaw sent me a patch, but I do not
8577 know of any way to detect if a function like the above has been
8578 compiled with the correct calling convention. */
8579
8580 /* Assume all other aggregate types can be returned in a register.
8581 Run a check for structures, unions and arrays. */
8582 nRc = 0;
8583
8584 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8585 {
8586 int i;
8587 /* Need to check if this struct/union is "integer" like. For
8588 this to be true, its size must be less than or equal to
8589 ARM_INT_REGISTER_SIZE and the offset of each addressable
8590 subfield must be zero. Note that bit fields are not
8591 addressable, and unions always start at offset zero. If any
8592 of the subfields is a floating point type, the struct/union
8593 cannot be an integer type. */
8594
8595 /* For each field in the object, check:
8596 1) Is it FP? --> yes, nRc = 1;
8597 2) Is it addressable (bitpos != 0) and
8598 not packed (bitsize == 0)?
8599 --> yes, nRc = 1
8600 */
8601
8602 for (i = 0; i < type->num_fields (); i++)
8603 {
8604 enum type_code field_type_code;
8605
8606 field_type_code
8607 = check_typedef (type->field (i).type ())->code ();
8608
8609 /* Is it a floating point type field? */
8610 if (field_type_code == TYPE_CODE_FLT)
8611 {
8612 nRc = 1;
8613 break;
8614 }
8615
8616 /* If bitpos != 0, then we have to care about it. */
8617 if (type->field (i).loc_bitpos () != 0)
8618 {
8619 /* Bitfields are not addressable. If the field bitsize is
8620 zero, then the field is not packed. Hence it cannot be
8621 a bitfield or any other packed type. */
8622 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8623 {
8624 nRc = 1;
8625 break;
8626 }
8627 }
8628 }
8629 }
8630
8631 return nRc;
8632 }
8633 }
8634
8635 /* Write into appropriate registers a function return value of type
8636 TYPE, given in virtual format. */
8637
8638 static void
8639 arm_store_return_value (struct type *type, struct regcache *regs,
8640 const gdb_byte *valbuf)
8641 {
8642 struct gdbarch *gdbarch = regs->arch ();
8643 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8644
8645 if (type->code () == TYPE_CODE_FLT)
8646 {
8647 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8648 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8649
8650 switch (tdep->fp_model)
8651 {
8652 case ARM_FLOAT_FPA:
8653
8654 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8655 regs->cooked_write (ARM_F0_REGNUM, buf);
8656 break;
8657
8658 case ARM_FLOAT_SOFT_FPA:
8659 case ARM_FLOAT_SOFT_VFP:
8660 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8661 not using the VFP ABI code. */
8662 case ARM_FLOAT_VFP:
8663 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8664 if (TYPE_LENGTH (type) > 4)
8665 regs->cooked_write (ARM_A1_REGNUM + 1,
8666 valbuf + ARM_INT_REGISTER_SIZE);
8667 break;
8668
8669 default:
8670 internal_error (__FILE__, __LINE__,
8671 _("arm_store_return_value: Floating "
8672 "point model not supported"));
8673 break;
8674 }
8675 }
8676 else if (type->code () == TYPE_CODE_INT
8677 || type->code () == TYPE_CODE_CHAR
8678 || type->code () == TYPE_CODE_BOOL
8679 || type->code () == TYPE_CODE_PTR
8680 || TYPE_IS_REFERENCE (type)
8681 || type->code () == TYPE_CODE_ENUM)
8682 {
8683 if (TYPE_LENGTH (type) <= 4)
8684 {
8685 /* Values of one word or less are zero/sign-extended and
8686 returned in r0. */
8687 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8688 LONGEST val = unpack_long (type, valbuf);
8689
8690 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8691 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8692 }
8693 else
8694 {
8695 /* Integral values greater than one word are stored in consecutive
8696 registers starting with r0. This will always be a multiple of
8697 the regiser size. */
8698 int len = TYPE_LENGTH (type);
8699 int regno = ARM_A1_REGNUM;
8700
8701 while (len > 0)
8702 {
8703 regs->cooked_write (regno++, valbuf);
8704 len -= ARM_INT_REGISTER_SIZE;
8705 valbuf += ARM_INT_REGISTER_SIZE;
8706 }
8707 }
8708 }
8709 else
8710 {
8711 /* For a structure or union the behaviour is as if the value had
8712 been stored to word-aligned memory and then loaded into
8713 registers with 32-bit load instruction(s). */
8714 int len = TYPE_LENGTH (type);
8715 int regno = ARM_A1_REGNUM;
8716 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8717
8718 while (len > 0)
8719 {
8720 memcpy (tmpbuf, valbuf,
8721 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8722 regs->cooked_write (regno++, tmpbuf);
8723 len -= ARM_INT_REGISTER_SIZE;
8724 valbuf += ARM_INT_REGISTER_SIZE;
8725 }
8726 }
8727 }
8728
8729
8730 /* Handle function return values. */
8731
8732 static enum return_value_convention
8733 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8734 struct type *valtype, struct regcache *regcache,
8735 gdb_byte *readbuf, const gdb_byte *writebuf)
8736 {
8737 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8738 struct type *func_type = function ? value_type (function) : NULL;
8739 enum arm_vfp_cprc_base_type vfp_base_type;
8740 int vfp_base_count;
8741
8742 if (arm_vfp_abi_for_function (gdbarch, func_type)
8743 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8744 {
8745 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8746 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8747 int i;
8748 for (i = 0; i < vfp_base_count; i++)
8749 {
8750 if (reg_char == 'q')
8751 {
8752 if (writebuf)
8753 arm_neon_quad_write (gdbarch, regcache, i,
8754 writebuf + i * unit_length);
8755
8756 if (readbuf)
8757 arm_neon_quad_read (gdbarch, regcache, i,
8758 readbuf + i * unit_length);
8759 }
8760 else
8761 {
8762 char name_buf[4];
8763 int regnum;
8764
8765 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8766 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8767 strlen (name_buf));
8768 if (writebuf)
8769 regcache->cooked_write (regnum, writebuf + i * unit_length);
8770 if (readbuf)
8771 regcache->cooked_read (regnum, readbuf + i * unit_length);
8772 }
8773 }
8774 return RETURN_VALUE_REGISTER_CONVENTION;
8775 }
8776
8777 if (valtype->code () == TYPE_CODE_STRUCT
8778 || valtype->code () == TYPE_CODE_UNION
8779 || valtype->code () == TYPE_CODE_ARRAY)
8780 {
8781 /* From the AAPCS document:
8782
8783 Result return:
8784
8785 A Composite Type larger than 4 bytes, or whose size cannot be
8786 determined statically by both caller and callee, is stored in memory
8787 at an address passed as an extra argument when the function was
8788 called (Parameter Passing, rule A.4). The memory to be used for the
8789 result may be modified at any point during the function call.
8790
8791 Parameter Passing:
8792
8793 A.4: If the subroutine is a function that returns a result in memory,
8794 then the address for the result is placed in r0 and the NCRN is set
8795 to r1. */
8796 if (tdep->struct_return == pcc_struct_return
8797 || arm_return_in_memory (gdbarch, valtype))
8798 {
8799 if (readbuf)
8800 {
8801 CORE_ADDR addr;
8802
8803 regcache->cooked_read (ARM_A1_REGNUM, &addr);
8804 read_memory (addr, readbuf, TYPE_LENGTH (valtype));
8805 }
8806 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
8807 }
8808 }
8809 else if (valtype->code () == TYPE_CODE_COMPLEX)
8810 {
8811 if (arm_return_in_memory (gdbarch, valtype))
8812 return RETURN_VALUE_STRUCT_CONVENTION;
8813 }
8814
8815 if (writebuf)
8816 arm_store_return_value (valtype, regcache, writebuf);
8817
8818 if (readbuf)
8819 arm_extract_return_value (valtype, regcache, readbuf);
8820
8821 return RETURN_VALUE_REGISTER_CONVENTION;
8822 }
8823
8824
8825 static int
8826 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8827 {
8828 struct gdbarch *gdbarch = get_frame_arch (frame);
8829 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8830 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8831 CORE_ADDR jb_addr;
8832 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8833
8834 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8835
8836 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8837 ARM_INT_REGISTER_SIZE))
8838 return 0;
8839
8840 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8841 return 1;
8842 }
8843 /* A call to cmse secure entry function "foo" at "a" is modified by
8844 GNU ld as "b".
8845 a) bl xxxx <foo>
8846
8847 <foo>
8848 xxxx:
8849
8850 b) bl yyyy <__acle_se_foo>
8851
8852 section .gnu.sgstubs:
8853 <foo>
8854 yyyy: sg // secure gateway
8855 b.w xxxx <__acle_se_foo> // original_branch_dest
8856
8857 <__acle_se_foo>
8858 xxxx:
8859
8860 When the control at "b", the pc contains "yyyy" (sg address) which is a
8861 trampoline and does not exist in source code. This function returns the
8862 target pc "xxxx". For more details please refer to section 5.4
8863 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8864 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8865 document on www.developer.arm.com. */
8866
8867 static CORE_ADDR
8868 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8869 {
8870 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8871 char *target_name = (char *) alloca (target_len);
8872 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8873
8874 struct bound_minimal_symbol minsym
8875 = lookup_minimal_symbol (target_name, NULL, objfile);
8876
8877 if (minsym.minsym != nullptr)
8878 return minsym.value_address ();
8879
8880 return 0;
8881 }
8882
8883 /* Return true when SEC points to ".gnu.sgstubs" section. */
8884
8885 static bool
8886 arm_is_sgstubs_section (struct obj_section *sec)
8887 {
8888 return (sec != nullptr
8889 && sec->the_bfd_section != nullptr
8890 && sec->the_bfd_section->name != nullptr
8891 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8892 }
8893
8894 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8895 return the target PC. Otherwise return 0. */
8896
8897 CORE_ADDR
8898 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8899 {
8900 const char *name;
8901 int namelen;
8902 CORE_ADDR start_addr;
8903
8904 /* Find the starting address and name of the function containing the PC. */
8905 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8906 {
8907 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8908 check here. */
8909 start_addr = arm_skip_bx_reg (frame, pc);
8910 if (start_addr != 0)
8911 return start_addr;
8912
8913 return 0;
8914 }
8915
8916 /* If PC is in a Thumb call or return stub, return the address of the
8917 target PC, which is in a register. The thunk functions are called
8918 _call_via_xx, where x is the register name. The possible names
8919 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8920 functions, named __ARM_call_via_r[0-7]. */
8921 if (startswith (name, "_call_via_")
8922 || startswith (name, "__ARM_call_via_"))
8923 {
8924 /* Use the name suffix to determine which register contains the
8925 target PC. */
8926 static const char *table[15] =
8927 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8928 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8929 };
8930 int regno;
8931 int offset = strlen (name) - 2;
8932
8933 for (regno = 0; regno <= 14; regno++)
8934 if (strcmp (&name[offset], table[regno]) == 0)
8935 return get_frame_register_unsigned (frame, regno);
8936 }
8937
8938 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8939 non-interworking calls to foo. We could decode the stubs
8940 to find the target but it's easier to use the symbol table. */
8941 namelen = strlen (name);
8942 if (name[0] == '_' && name[1] == '_'
8943 && ((namelen > 2 + strlen ("_from_thumb")
8944 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8945 || (namelen > 2 + strlen ("_from_arm")
8946 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8947 {
8948 char *target_name;
8949 int target_len = namelen - 2;
8950 struct bound_minimal_symbol minsym;
8951 struct objfile *objfile;
8952 struct obj_section *sec;
8953
8954 if (name[namelen - 1] == 'b')
8955 target_len -= strlen ("_from_thumb");
8956 else
8957 target_len -= strlen ("_from_arm");
8958
8959 target_name = (char *) alloca (target_len + 1);
8960 memcpy (target_name, name + 2, target_len);
8961 target_name[target_len] = '\0';
8962
8963 sec = find_pc_section (pc);
8964 objfile = (sec == NULL) ? NULL : sec->objfile;
8965 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8966 if (minsym.minsym != NULL)
8967 return minsym.value_address ();
8968 else
8969 return 0;
8970 }
8971
8972 struct obj_section *section = find_pc_section (pc);
8973
8974 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8975 if (arm_is_sgstubs_section (section))
8976 return arm_skip_cmse_entry (pc, name, section->objfile);
8977
8978 return 0; /* not a stub */
8979 }
8980
8981 static void
8982 arm_update_current_architecture (void)
8983 {
8984 /* If the current architecture is not ARM, we have nothing to do. */
8985 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8986 return;
8987
8988 /* Update the architecture. */
8989 gdbarch_info info;
8990 if (!gdbarch_update_p (info))
8991 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8992 }
8993
8994 static void
8995 set_fp_model_sfunc (const char *args, int from_tty,
8996 struct cmd_list_element *c)
8997 {
8998 int fp_model;
8999
9000 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9001 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9002 {
9003 arm_fp_model = (enum arm_float_model) fp_model;
9004 break;
9005 }
9006
9007 if (fp_model == ARM_FLOAT_LAST)
9008 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9009 current_fp_model);
9010
9011 arm_update_current_architecture ();
9012 }
9013
9014 static void
9015 show_fp_model (struct ui_file *file, int from_tty,
9016 struct cmd_list_element *c, const char *value)
9017 {
9018 arm_gdbarch_tdep *tdep
9019 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
9020
9021 if (arm_fp_model == ARM_FLOAT_AUTO
9022 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9023 gdb_printf (file, _("\
9024 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9025 fp_model_strings[tdep->fp_model]);
9026 else
9027 gdb_printf (file, _("\
9028 The current ARM floating point model is \"%s\".\n"),
9029 fp_model_strings[arm_fp_model]);
9030 }
9031
9032 static void
9033 arm_set_abi (const char *args, int from_tty,
9034 struct cmd_list_element *c)
9035 {
9036 int arm_abi;
9037
9038 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9039 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9040 {
9041 arm_abi_global = (enum arm_abi_kind) arm_abi;
9042 break;
9043 }
9044
9045 if (arm_abi == ARM_ABI_LAST)
9046 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9047 arm_abi_string);
9048
9049 arm_update_current_architecture ();
9050 }
9051
9052 static void
9053 arm_show_abi (struct ui_file *file, int from_tty,
9054 struct cmd_list_element *c, const char *value)
9055 {
9056 arm_gdbarch_tdep *tdep
9057 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
9058
9059 if (arm_abi_global == ARM_ABI_AUTO
9060 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9061 gdb_printf (file, _("\
9062 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9063 arm_abi_strings[tdep->arm_abi]);
9064 else
9065 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9066 arm_abi_string);
9067 }
9068
9069 static void
9070 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9071 struct cmd_list_element *c, const char *value)
9072 {
9073 gdb_printf (file,
9074 _("The current execution mode assumed "
9075 "(when symbols are unavailable) is \"%s\".\n"),
9076 arm_fallback_mode_string);
9077 }
9078
9079 static void
9080 arm_show_force_mode (struct ui_file *file, int from_tty,
9081 struct cmd_list_element *c, const char *value)
9082 {
9083 gdb_printf (file,
9084 _("The current execution mode assumed "
9085 "(even when symbols are available) is \"%s\".\n"),
9086 arm_force_mode_string);
9087 }
9088
9089 static void
9090 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9091 struct cmd_list_element *c, const char *value)
9092 {
9093 gdb_printf (file,
9094 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9095 arm_unwind_secure_frames ? "on" : "off");
9096 }
9097
9098 /* If the user changes the register disassembly style used for info
9099 register and other commands, we have to also switch the style used
9100 in opcodes for disassembly output. This function is run in the "set
9101 arm disassembly" command, and does that. */
9102
9103 static void
9104 set_disassembly_style_sfunc (const char *args, int from_tty,
9105 struct cmd_list_element *c)
9106 {
9107 /* Convert the short style name into the long style name (eg, reg-names-*)
9108 before calling the generic set_disassembler_options() function. */
9109 std::string long_name = std::string ("reg-names-") + disassembly_style;
9110 set_disassembler_options (&long_name[0]);
9111 }
9112
9113 static void
9114 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9115 struct cmd_list_element *c, const char *value)
9116 {
9117 struct gdbarch *gdbarch = get_current_arch ();
9118 char *options = get_disassembler_options (gdbarch);
9119 const char *style = "";
9120 int len = 0;
9121 const char *opt;
9122
9123 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9124 if (startswith (opt, "reg-names-"))
9125 {
9126 style = &opt[strlen ("reg-names-")];
9127 len = strcspn (style, ",");
9128 }
9129
9130 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9131 }
9132 \f
9133 /* Return the ARM register name corresponding to register I. */
9134 static const char *
9135 arm_register_name (struct gdbarch *gdbarch, int i)
9136 {
9137 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9138
9139 if (is_s_pseudo (gdbarch, i))
9140 {
9141 static const char *const s_pseudo_names[] = {
9142 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9143 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9144 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9145 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9146 };
9147
9148 return s_pseudo_names[i - tdep->s_pseudo_base];
9149 }
9150
9151 if (is_q_pseudo (gdbarch, i))
9152 {
9153 static const char *const q_pseudo_names[] = {
9154 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9155 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9156 };
9157
9158 return q_pseudo_names[i - tdep->q_pseudo_base];
9159 }
9160
9161 if (is_mve_pseudo (gdbarch, i))
9162 return "p0";
9163
9164 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9165 if (is_pacbti_pseudo (gdbarch, i))
9166 return "";
9167
9168 if (i >= ARRAY_SIZE (arm_register_names))
9169 /* These registers are only supported on targets which supply
9170 an XML description. */
9171 return "";
9172
9173 /* Non-pseudo registers. */
9174 return arm_register_names[i];
9175 }
9176
9177 /* Test whether the coff symbol specific value corresponds to a Thumb
9178 function. */
9179
9180 static int
9181 coff_sym_is_thumb (int val)
9182 {
9183 return (val == C_THUMBEXT
9184 || val == C_THUMBSTAT
9185 || val == C_THUMBEXTFUNC
9186 || val == C_THUMBSTATFUNC
9187 || val == C_THUMBLABEL);
9188 }
9189
9190 /* arm_coff_make_msymbol_special()
9191 arm_elf_make_msymbol_special()
9192
9193 These functions test whether the COFF or ELF symbol corresponds to
9194 an address in thumb code, and set a "special" bit in a minimal
9195 symbol to indicate that it does. */
9196
9197 static void
9198 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9199 {
9200 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9201
9202 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9203 == ST_BRANCH_TO_THUMB)
9204 MSYMBOL_SET_SPECIAL (msym);
9205 }
9206
9207 static void
9208 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9209 {
9210 if (coff_sym_is_thumb (val))
9211 MSYMBOL_SET_SPECIAL (msym);
9212 }
9213
9214 static void
9215 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9216 asymbol *sym)
9217 {
9218 const char *name = bfd_asymbol_name (sym);
9219 struct arm_per_bfd *data;
9220 struct arm_mapping_symbol new_map_sym;
9221
9222 gdb_assert (name[0] == '$');
9223 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9224 return;
9225
9226 data = arm_bfd_data_key.get (objfile->obfd);
9227 if (data == NULL)
9228 data = arm_bfd_data_key.emplace (objfile->obfd,
9229 objfile->obfd->section_count);
9230 arm_mapping_symbol_vec &map
9231 = data->section_maps[bfd_asymbol_section (sym)->index];
9232
9233 new_map_sym.value = sym->value;
9234 new_map_sym.type = name[1];
9235
9236 /* Insert at the end, the vector will be sorted on first use. */
9237 map.push_back (new_map_sym);
9238 }
9239
9240 static void
9241 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9242 {
9243 struct gdbarch *gdbarch = regcache->arch ();
9244 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9245
9246 /* If necessary, set the T bit. */
9247 if (arm_apcs_32)
9248 {
9249 ULONGEST val, t_bit;
9250 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9251 t_bit = arm_psr_thumb_bit (gdbarch);
9252 if (arm_pc_is_thumb (gdbarch, pc))
9253 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9254 val | t_bit);
9255 else
9256 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9257 val & ~t_bit);
9258 }
9259 }
9260
9261 /* Read the contents of a NEON quad register, by reading from two
9262 double registers. This is used to implement the quad pseudo
9263 registers, and for argument passing in case the quad registers are
9264 missing; vectors are passed in quad registers when using the VFP
9265 ABI, even if a NEON unit is not present. REGNUM is the index of
9266 the quad register, in [0, 15]. */
9267
9268 static enum register_status
9269 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9270 int regnum, gdb_byte *buf)
9271 {
9272 char name_buf[4];
9273 gdb_byte reg_buf[8];
9274 int offset, double_regnum;
9275 enum register_status status;
9276
9277 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9278 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9279 strlen (name_buf));
9280
9281 /* d0 is always the least significant half of q0. */
9282 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9283 offset = 8;
9284 else
9285 offset = 0;
9286
9287 status = regcache->raw_read (double_regnum, reg_buf);
9288 if (status != REG_VALID)
9289 return status;
9290 memcpy (buf + offset, reg_buf, 8);
9291
9292 offset = 8 - offset;
9293 status = regcache->raw_read (double_regnum + 1, reg_buf);
9294 if (status != REG_VALID)
9295 return status;
9296 memcpy (buf + offset, reg_buf, 8);
9297
9298 return REG_VALID;
9299 }
9300
9301 /* Read the contents of the MVE pseudo register REGNUM and store it
9302 in BUF. */
9303
9304 static enum register_status
9305 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9306 int regnum, gdb_byte *buf)
9307 {
9308 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9309
9310 /* P0 is the first 16 bits of VPR. */
9311 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9312 }
9313
9314 static enum register_status
9315 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9316 int regnum, gdb_byte *buf)
9317 {
9318 const int num_regs = gdbarch_num_regs (gdbarch);
9319 char name_buf[4];
9320 gdb_byte reg_buf[8];
9321 int offset, double_regnum;
9322 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9323
9324 gdb_assert (regnum >= num_regs);
9325
9326 if (is_q_pseudo (gdbarch, regnum))
9327 {
9328 /* Quad-precision register. */
9329 return arm_neon_quad_read (gdbarch, regcache,
9330 regnum - tdep->q_pseudo_base, buf);
9331 }
9332 else if (is_mve_pseudo (gdbarch, regnum))
9333 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9334 else
9335 {
9336 enum register_status status;
9337
9338 regnum -= tdep->s_pseudo_base;
9339 /* Single-precision register. */
9340 gdb_assert (regnum < 32);
9341
9342 /* s0 is always the least significant half of d0. */
9343 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9344 offset = (regnum & 1) ? 0 : 4;
9345 else
9346 offset = (regnum & 1) ? 4 : 0;
9347
9348 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9349 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9350 strlen (name_buf));
9351
9352 status = regcache->raw_read (double_regnum, reg_buf);
9353 if (status == REG_VALID)
9354 memcpy (buf, reg_buf + offset, 4);
9355 return status;
9356 }
9357 }
9358
9359 /* Store the contents of BUF to a NEON quad register, by writing to
9360 two double registers. This is used to implement the quad pseudo
9361 registers, and for argument passing in case the quad registers are
9362 missing; vectors are passed in quad registers when using the VFP
9363 ABI, even if a NEON unit is not present. REGNUM is the index
9364 of the quad register, in [0, 15]. */
9365
9366 static void
9367 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9368 int regnum, const gdb_byte *buf)
9369 {
9370 char name_buf[4];
9371 int offset, double_regnum;
9372
9373 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9374 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9375 strlen (name_buf));
9376
9377 /* d0 is always the least significant half of q0. */
9378 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9379 offset = 8;
9380 else
9381 offset = 0;
9382
9383 regcache->raw_write (double_regnum, buf + offset);
9384 offset = 8 - offset;
9385 regcache->raw_write (double_regnum + 1, buf + offset);
9386 }
9387
9388 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9389
9390 static void
9391 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9392 int regnum, const gdb_byte *buf)
9393 {
9394 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9395
9396 /* P0 is the first 16 bits of VPR. */
9397 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9398 }
9399
9400 static void
9401 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9402 int regnum, const gdb_byte *buf)
9403 {
9404 const int num_regs = gdbarch_num_regs (gdbarch);
9405 char name_buf[4];
9406 gdb_byte reg_buf[8];
9407 int offset, double_regnum;
9408 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9409
9410 gdb_assert (regnum >= num_regs);
9411
9412 if (is_q_pseudo (gdbarch, regnum))
9413 {
9414 /* Quad-precision register. */
9415 arm_neon_quad_write (gdbarch, regcache,
9416 regnum - tdep->q_pseudo_base, buf);
9417 }
9418 else if (is_mve_pseudo (gdbarch, regnum))
9419 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9420 else
9421 {
9422 regnum -= tdep->s_pseudo_base;
9423 /* Single-precision register. */
9424 gdb_assert (regnum < 32);
9425
9426 /* s0 is always the least significant half of d0. */
9427 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9428 offset = (regnum & 1) ? 0 : 4;
9429 else
9430 offset = (regnum & 1) ? 4 : 0;
9431
9432 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9433 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9434 strlen (name_buf));
9435
9436 regcache->raw_read (double_regnum, reg_buf);
9437 memcpy (reg_buf + offset, buf, 4);
9438 regcache->raw_write (double_regnum, reg_buf);
9439 }
9440 }
9441
9442 static struct value *
9443 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9444 {
9445 const int *reg_p = (const int *) baton;
9446 return value_of_register (*reg_p, frame);
9447 }
9448 \f
9449 static enum gdb_osabi
9450 arm_elf_osabi_sniffer (bfd *abfd)
9451 {
9452 unsigned int elfosabi;
9453 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9454
9455 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9456
9457 if (elfosabi == ELFOSABI_ARM)
9458 /* GNU tools use this value. Check note sections in this case,
9459 as well. */
9460 {
9461 for (asection *sect : gdb_bfd_sections (abfd))
9462 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9463 }
9464
9465 /* Anything else will be handled by the generic ELF sniffer. */
9466 return osabi;
9467 }
9468
9469 static int
9470 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9471 const struct reggroup *group)
9472 {
9473 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9474 this, FPS register belongs to save_regroup, restore_reggroup, and
9475 all_reggroup, of course. */
9476 if (regnum == ARM_FPS_REGNUM)
9477 return (group == float_reggroup
9478 || group == save_reggroup
9479 || group == restore_reggroup
9480 || group == all_reggroup);
9481 else
9482 return default_register_reggroup_p (gdbarch, regnum, group);
9483 }
9484
9485 /* For backward-compatibility we allow two 'g' packet lengths with
9486 the remote protocol depending on whether FPA registers are
9487 supplied. M-profile targets do not have FPA registers, but some
9488 stubs already exist in the wild which use a 'g' packet which
9489 supplies them albeit with dummy values. The packet format which
9490 includes FPA registers should be considered deprecated for
9491 M-profile targets. */
9492
9493 static void
9494 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9495 {
9496 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9497
9498 if (tdep->is_m)
9499 {
9500 const target_desc *tdesc;
9501
9502 /* If we know from the executable this is an M-profile target,
9503 cater for remote targets whose register set layout is the
9504 same as the FPA layout. */
9505 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9506 register_remote_g_packet_guess (gdbarch,
9507 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9508 tdesc);
9509
9510 /* The regular M-profile layout. */
9511 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9512 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9513 tdesc);
9514
9515 /* M-profile plus M4F VFP. */
9516 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9517 register_remote_g_packet_guess (gdbarch,
9518 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9519 tdesc);
9520 /* M-profile plus MVE. */
9521 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9522 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9523 + ARM_VFP2_REGS_SIZE
9524 + ARM_INT_REGISTER_SIZE, tdesc);
9525
9526 /* M-profile system (stack pointers). */
9527 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
9528 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
9529 }
9530
9531 /* Otherwise we don't have a useful guess. */
9532 }
9533
9534 /* Implement the code_of_frame_writable gdbarch method. */
9535
9536 static int
9537 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
9538 {
9539 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9540
9541 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9542 {
9543 /* M-profile exception frames return to some magic PCs, where
9544 isn't writable at all. */
9545 return 0;
9546 }
9547 else
9548 return 1;
9549 }
9550
9551 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
9552 to be postfixed by a version (eg armv7hl). */
9553
9554 static const char *
9555 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9556 {
9557 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9558 return "arm(v[^- ]*)?";
9559 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9560 }
9561
9562 /* Implement the "get_pc_address_flags" gdbarch method. */
9563
9564 static std::string
9565 arm_get_pc_address_flags (frame_info *frame, CORE_ADDR pc)
9566 {
9567 if (get_frame_pc_masked (frame))
9568 return "PAC";
9569
9570 return "";
9571 }
9572
9573 /* Initialize the current architecture based on INFO. If possible,
9574 re-use an architecture from ARCHES, which is a list of
9575 architectures already created during this debugging session.
9576
9577 Called e.g. at program startup, when reading a core file, and when
9578 reading a binary file. */
9579
9580 static struct gdbarch *
9581 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9582 {
9583 struct gdbarch *gdbarch;
9584 struct gdbarch_list *best_arch;
9585 enum arm_abi_kind arm_abi = arm_abi_global;
9586 enum arm_float_model fp_model = arm_fp_model;
9587 tdesc_arch_data_up tdesc_data;
9588 int i;
9589 bool is_m = false;
9590 bool have_sec_ext = false;
9591 int vfp_register_count = 0;
9592 bool have_s_pseudos = false, have_q_pseudos = false;
9593 bool have_wmmx_registers = false;
9594 bool have_neon = false;
9595 bool have_fpa_registers = true;
9596 const struct target_desc *tdesc = info.target_desc;
9597 bool have_vfp = false;
9598 bool have_mve = false;
9599 bool have_pacbti = false;
9600 int mve_vpr_regnum = -1;
9601 int register_count = ARM_NUM_REGS;
9602 bool have_m_profile_msp = false;
9603 int m_profile_msp_regnum = -1;
9604 int m_profile_psp_regnum = -1;
9605 int m_profile_msp_ns_regnum = -1;
9606 int m_profile_psp_ns_regnum = -1;
9607 int m_profile_msp_s_regnum = -1;
9608 int m_profile_psp_s_regnum = -1;
9609
9610 /* If we have an object to base this architecture on, try to determine
9611 its ABI. */
9612
9613 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9614 {
9615 int ei_osabi, e_flags;
9616
9617 switch (bfd_get_flavour (info.abfd))
9618 {
9619 case bfd_target_coff_flavour:
9620 /* Assume it's an old APCS-style ABI. */
9621 /* XXX WinCE? */
9622 arm_abi = ARM_ABI_APCS;
9623 break;
9624
9625 case bfd_target_elf_flavour:
9626 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9627 e_flags = elf_elfheader (info.abfd)->e_flags;
9628
9629 if (ei_osabi == ELFOSABI_ARM)
9630 {
9631 /* GNU tools used to use this value, but do not for EABI
9632 objects. There's nowhere to tag an EABI version
9633 anyway, so assume APCS. */
9634 arm_abi = ARM_ABI_APCS;
9635 }
9636 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9637 {
9638 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9639
9640 switch (eabi_ver)
9641 {
9642 case EF_ARM_EABI_UNKNOWN:
9643 /* Assume GNU tools. */
9644 arm_abi = ARM_ABI_APCS;
9645 break;
9646
9647 case EF_ARM_EABI_VER4:
9648 case EF_ARM_EABI_VER5:
9649 arm_abi = ARM_ABI_AAPCS;
9650 /* EABI binaries default to VFP float ordering.
9651 They may also contain build attributes that can
9652 be used to identify if the VFP argument-passing
9653 ABI is in use. */
9654 if (fp_model == ARM_FLOAT_AUTO)
9655 {
9656 #ifdef HAVE_ELF
9657 switch (bfd_elf_get_obj_attr_int (info.abfd,
9658 OBJ_ATTR_PROC,
9659 Tag_ABI_VFP_args))
9660 {
9661 case AEABI_VFP_args_base:
9662 /* "The user intended FP parameter/result
9663 passing to conform to AAPCS, base
9664 variant". */
9665 fp_model = ARM_FLOAT_SOFT_VFP;
9666 break;
9667 case AEABI_VFP_args_vfp:
9668 /* "The user intended FP parameter/result
9669 passing to conform to AAPCS, VFP
9670 variant". */
9671 fp_model = ARM_FLOAT_VFP;
9672 break;
9673 case AEABI_VFP_args_toolchain:
9674 /* "The user intended FP parameter/result
9675 passing to conform to tool chain-specific
9676 conventions" - we don't know any such
9677 conventions, so leave it as "auto". */
9678 break;
9679 case AEABI_VFP_args_compatible:
9680 /* "Code is compatible with both the base
9681 and VFP variants; the user did not permit
9682 non-variadic functions to pass FP
9683 parameters/results" - leave it as
9684 "auto". */
9685 break;
9686 default:
9687 /* Attribute value not mentioned in the
9688 November 2012 ABI, so leave it as
9689 "auto". */
9690 break;
9691 }
9692 #else
9693 fp_model = ARM_FLOAT_SOFT_VFP;
9694 #endif
9695 }
9696 break;
9697
9698 default:
9699 /* Leave it as "auto". */
9700 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9701 break;
9702 }
9703
9704 #ifdef HAVE_ELF
9705 /* Detect M-profile programs. This only works if the
9706 executable file includes build attributes; GCC does
9707 copy them to the executable, but e.g. RealView does
9708 not. */
9709 int attr_arch
9710 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9711 Tag_CPU_arch);
9712 int attr_profile
9713 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9714 Tag_CPU_arch_profile);
9715
9716 /* GCC specifies the profile for v6-M; RealView only
9717 specifies the profile for architectures starting with
9718 V7 (as opposed to architectures with a tag
9719 numerically greater than TAG_CPU_ARCH_V7). */
9720 if (!tdesc_has_registers (tdesc)
9721 && (attr_arch == TAG_CPU_ARCH_V6_M
9722 || attr_arch == TAG_CPU_ARCH_V6S_M
9723 || attr_arch == TAG_CPU_ARCH_V7E_M
9724 || attr_arch == TAG_CPU_ARCH_V8M_BASE
9725 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
9726 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
9727 || attr_profile == 'M'))
9728 is_m = true;
9729
9730 /* Look for attributes that indicate support for ARMv8.1-m
9731 PACBTI. */
9732 if (!tdesc_has_registers (tdesc) && is_m)
9733 {
9734 int attr_pac_extension
9735 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9736 Tag_PAC_extension);
9737
9738 int attr_bti_extension
9739 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9740 Tag_BTI_extension);
9741
9742 int attr_pacret_use
9743 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9744 Tag_PACRET_use);
9745
9746 int attr_bti_use
9747 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9748 Tag_BTI_use);
9749
9750 if (attr_pac_extension != 0 || attr_bti_extension != 0
9751 || attr_pacret_use != 0 || attr_bti_use != 0)
9752 have_pacbti = true;
9753 }
9754 #endif
9755 }
9756
9757 if (fp_model == ARM_FLOAT_AUTO)
9758 {
9759 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9760 {
9761 case 0:
9762 /* Leave it as "auto". Strictly speaking this case
9763 means FPA, but almost nobody uses that now, and
9764 many toolchains fail to set the appropriate bits
9765 for the floating-point model they use. */
9766 break;
9767 case EF_ARM_SOFT_FLOAT:
9768 fp_model = ARM_FLOAT_SOFT_FPA;
9769 break;
9770 case EF_ARM_VFP_FLOAT:
9771 fp_model = ARM_FLOAT_VFP;
9772 break;
9773 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9774 fp_model = ARM_FLOAT_SOFT_VFP;
9775 break;
9776 }
9777 }
9778
9779 if (e_flags & EF_ARM_BE8)
9780 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9781
9782 break;
9783
9784 default:
9785 /* Leave it as "auto". */
9786 break;
9787 }
9788 }
9789
9790 /* Check any target description for validity. */
9791 if (tdesc_has_registers (tdesc))
9792 {
9793 /* For most registers we require GDB's default names; but also allow
9794 the numeric names for sp / lr / pc, as a convenience. */
9795 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9796 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9797 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9798
9799 const struct tdesc_feature *feature;
9800 int valid_p;
9801
9802 feature = tdesc_find_feature (tdesc,
9803 "org.gnu.gdb.arm.core");
9804 if (feature == NULL)
9805 {
9806 feature = tdesc_find_feature (tdesc,
9807 "org.gnu.gdb.arm.m-profile");
9808 if (feature == NULL)
9809 return NULL;
9810 else
9811 is_m = true;
9812 }
9813
9814 tdesc_data = tdesc_data_alloc ();
9815
9816 valid_p = 1;
9817 for (i = 0; i < ARM_SP_REGNUM; i++)
9818 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9819 arm_register_names[i]);
9820 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9821 ARM_SP_REGNUM,
9822 arm_sp_names);
9823 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9824 ARM_LR_REGNUM,
9825 arm_lr_names);
9826 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9827 ARM_PC_REGNUM,
9828 arm_pc_names);
9829 if (is_m)
9830 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9831 ARM_PS_REGNUM, "xpsr");
9832 else
9833 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9834 ARM_PS_REGNUM, "cpsr");
9835
9836 if (!valid_p)
9837 return NULL;
9838
9839 if (is_m)
9840 {
9841 feature = tdesc_find_feature (tdesc,
9842 "org.gnu.gdb.arm.m-system");
9843 if (feature != nullptr)
9844 {
9845 /* MSP */
9846 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9847 register_count, "msp");
9848 if (!valid_p)
9849 {
9850 warning (_("M-profile m-system feature is missing required register msp."));
9851 return nullptr;
9852 }
9853 have_m_profile_msp = true;
9854 m_profile_msp_regnum = register_count++;
9855
9856 /* PSP */
9857 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9858 register_count, "psp");
9859 if (!valid_p)
9860 {
9861 warning (_("M-profile m-system feature is missing required register psp."));
9862 return nullptr;
9863 }
9864 m_profile_psp_regnum = register_count++;
9865 }
9866 }
9867
9868 feature = tdesc_find_feature (tdesc,
9869 "org.gnu.gdb.arm.fpa");
9870 if (feature != NULL)
9871 {
9872 valid_p = 1;
9873 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9874 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9875 arm_register_names[i]);
9876 if (!valid_p)
9877 return NULL;
9878 }
9879 else
9880 have_fpa_registers = false;
9881
9882 feature = tdesc_find_feature (tdesc,
9883 "org.gnu.gdb.xscale.iwmmxt");
9884 if (feature != NULL)
9885 {
9886 static const char *const iwmmxt_names[] = {
9887 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9888 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9889 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9890 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9891 };
9892
9893 valid_p = 1;
9894 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9895 valid_p
9896 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9897 iwmmxt_names[i - ARM_WR0_REGNUM]);
9898
9899 /* Check for the control registers, but do not fail if they
9900 are missing. */
9901 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9902 tdesc_numbered_register (feature, tdesc_data.get (), i,
9903 iwmmxt_names[i - ARM_WR0_REGNUM]);
9904
9905 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9906 valid_p
9907 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9908 iwmmxt_names[i - ARM_WR0_REGNUM]);
9909
9910 if (!valid_p)
9911 return NULL;
9912
9913 have_wmmx_registers = true;
9914 }
9915
9916 /* If we have a VFP unit, check whether the single precision registers
9917 are present. If not, then we will synthesize them as pseudo
9918 registers. */
9919 feature = tdesc_find_feature (tdesc,
9920 "org.gnu.gdb.arm.vfp");
9921 if (feature != NULL)
9922 {
9923 static const char *const vfp_double_names[] = {
9924 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9925 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9926 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9927 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9928 };
9929
9930 /* Require the double precision registers. There must be either
9931 16 or 32. */
9932 valid_p = 1;
9933 for (i = 0; i < 32; i++)
9934 {
9935 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9936 ARM_D0_REGNUM + i,
9937 vfp_double_names[i]);
9938 if (!valid_p)
9939 break;
9940 }
9941 if (!valid_p && i == 16)
9942 valid_p = 1;
9943
9944 /* Also require FPSCR. */
9945 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9946 ARM_FPSCR_REGNUM, "fpscr");
9947 if (!valid_p)
9948 return NULL;
9949
9950 have_vfp = true;
9951
9952 if (tdesc_unnumbered_register (feature, "s0") == 0)
9953 have_s_pseudos = true;
9954
9955 vfp_register_count = i;
9956
9957 /* If we have VFP, also check for NEON. The architecture allows
9958 NEON without VFP (integer vector operations only), but GDB
9959 does not support that. */
9960 feature = tdesc_find_feature (tdesc,
9961 "org.gnu.gdb.arm.neon");
9962 if (feature != NULL)
9963 {
9964 /* NEON requires 32 double-precision registers. */
9965 if (i != 32)
9966 return NULL;
9967
9968 /* If there are quad registers defined by the stub, use
9969 their type; otherwise (normally) provide them with
9970 the default type. */
9971 if (tdesc_unnumbered_register (feature, "q0") == 0)
9972 have_q_pseudos = true;
9973 }
9974 }
9975
9976 /* Check for MVE after all the checks for GPR's, VFP and Neon.
9977 MVE (Helium) is an M-profile extension. */
9978 if (is_m)
9979 {
9980 /* Do we have the MVE feature? */
9981 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
9982
9983 if (feature != nullptr)
9984 {
9985 /* If we have MVE, we must always have the VPR register. */
9986 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9987 register_count, "vpr");
9988 if (!valid_p)
9989 {
9990 warning (_("MVE feature is missing required register vpr."));
9991 return nullptr;
9992 }
9993
9994 have_mve = true;
9995 mve_vpr_regnum = register_count;
9996 register_count++;
9997
9998 /* We can't have Q pseudo registers available here, as that
9999 would mean we have NEON features, and that is only available
10000 on A and R profiles. */
10001 gdb_assert (!have_q_pseudos);
10002
10003 /* Given we have a M-profile target description, if MVE is
10004 enabled and there are VFP registers, we should have Q
10005 pseudo registers (Q0 ~ Q7). */
10006 if (have_vfp)
10007 have_q_pseudos = true;
10008 }
10009
10010 /* Do we have the ARMv8.1-m PACBTI feature? */
10011 feature = tdesc_find_feature (tdesc,
10012 "org.gnu.gdb.arm.m-profile-pacbti");
10013 if (feature != nullptr)
10014 {
10015 /* By advertising this feature, the target acknowledges the
10016 presence of the ARMv8.1-m PACBTI extensions.
10017
10018 We don't care for any particular registers in this group, so
10019 the target is free to include whatever it deems appropriate.
10020
10021 The expectation is for this feature to include the PAC
10022 keys. */
10023 have_pacbti = true;
10024 }
10025
10026 /* Do we have the Security extension? */
10027 feature = tdesc_find_feature (tdesc,
10028 "org.gnu.gdb.arm.secext");
10029 if (feature != nullptr)
10030 {
10031 /* Secure/Non-secure stack pointers. */
10032 /* MSP_NS */
10033 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10034 register_count, "msp_ns");
10035 if (!valid_p)
10036 {
10037 warning (_("M-profile secext feature is missing required register msp_ns."));
10038 return nullptr;
10039 }
10040 m_profile_msp_ns_regnum = register_count++;
10041
10042 /* PSP_NS */
10043 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10044 register_count, "psp_ns");
10045 if (!valid_p)
10046 {
10047 warning (_("M-profile secext feature is missing required register psp_ns."));
10048 return nullptr;
10049 }
10050 m_profile_psp_ns_regnum = register_count++;
10051
10052 /* MSP_S */
10053 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10054 register_count, "msp_s");
10055 if (!valid_p)
10056 {
10057 warning (_("M-profile secext feature is missing required register msp_s."));
10058 return nullptr;
10059 }
10060 m_profile_msp_s_regnum = register_count++;
10061
10062 /* PSP_S */
10063 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10064 register_count, "psp_s");
10065 if (!valid_p)
10066 {
10067 warning (_("M-profile secext feature is missing required register psp_s."));
10068 return nullptr;
10069 }
10070 m_profile_psp_s_regnum = register_count++;
10071
10072 have_sec_ext = true;
10073 }
10074
10075 }
10076 }
10077
10078 /* If there is already a candidate, use it. */
10079 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10080 best_arch != NULL;
10081 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10082 {
10083 arm_gdbarch_tdep *tdep
10084 = (arm_gdbarch_tdep *) gdbarch_tdep (best_arch->gdbarch);
10085
10086 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10087 continue;
10088
10089 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10090 continue;
10091
10092 /* There are various other properties in tdep that we do not
10093 need to check here: those derived from a target description,
10094 since gdbarches with a different target description are
10095 automatically disqualified. */
10096
10097 /* Do check is_m, though, since it might come from the binary. */
10098 if (is_m != tdep->is_m)
10099 continue;
10100
10101 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10102 the binary. */
10103 if (have_pacbti != tdep->have_pacbti)
10104 continue;
10105
10106 /* Found a match. */
10107 break;
10108 }
10109
10110 if (best_arch != NULL)
10111 return best_arch->gdbarch;
10112
10113 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
10114 gdbarch = gdbarch_alloc (&info, tdep);
10115
10116 /* Record additional information about the architecture we are defining.
10117 These are gdbarch discriminators, like the OSABI. */
10118 tdep->arm_abi = arm_abi;
10119 tdep->fp_model = fp_model;
10120 tdep->is_m = is_m;
10121 tdep->have_sec_ext = have_sec_ext;
10122 tdep->have_fpa_registers = have_fpa_registers;
10123 tdep->have_wmmx_registers = have_wmmx_registers;
10124 gdb_assert (vfp_register_count == 0
10125 || vfp_register_count == 16
10126 || vfp_register_count == 32);
10127 tdep->vfp_register_count = vfp_register_count;
10128 tdep->have_s_pseudos = have_s_pseudos;
10129 tdep->have_q_pseudos = have_q_pseudos;
10130 tdep->have_neon = have_neon;
10131
10132 /* Adjust the MVE feature settings. */
10133 if (have_mve)
10134 {
10135 tdep->have_mve = true;
10136 tdep->mve_vpr_regnum = mve_vpr_regnum;
10137 }
10138
10139 /* Adjust the PACBTI feature settings. */
10140 tdep->have_pacbti = have_pacbti;
10141
10142 /* Adjust the M-profile stack pointers settings. */
10143 if (have_m_profile_msp)
10144 {
10145 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10146 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10147 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10148 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10149 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10150 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10151 }
10152
10153 arm_register_g_packet_guesses (gdbarch);
10154
10155 /* Breakpoints. */
10156 switch (info.byte_order_for_code)
10157 {
10158 case BFD_ENDIAN_BIG:
10159 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10160 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10161 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10162 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10163
10164 break;
10165
10166 case BFD_ENDIAN_LITTLE:
10167 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10168 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10169 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10170 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10171
10172 break;
10173
10174 default:
10175 internal_error (__FILE__, __LINE__,
10176 _("arm_gdbarch_init: bad byte order for float format"));
10177 }
10178
10179 /* On ARM targets char defaults to unsigned. */
10180 set_gdbarch_char_signed (gdbarch, 0);
10181
10182 /* wchar_t is unsigned under the AAPCS. */
10183 if (tdep->arm_abi == ARM_ABI_AAPCS)
10184 set_gdbarch_wchar_signed (gdbarch, 0);
10185 else
10186 set_gdbarch_wchar_signed (gdbarch, 1);
10187
10188 /* Compute type alignment. */
10189 set_gdbarch_type_align (gdbarch, arm_type_align);
10190
10191 /* Note: for displaced stepping, this includes the breakpoint, and one word
10192 of additional scratch space. This setting isn't used for anything beside
10193 displaced stepping at present. */
10194 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10195
10196 /* This should be low enough for everything. */
10197 tdep->lowest_pc = 0x20;
10198 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10199
10200 /* The default, for both APCS and AAPCS, is to return small
10201 structures in registers. */
10202 tdep->struct_return = reg_struct_return;
10203
10204 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10205 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10206
10207 if (is_m)
10208 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10209
10210 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10211
10212 frame_base_set_default (gdbarch, &arm_normal_base);
10213
10214 /* Address manipulation. */
10215 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10216
10217 /* Advance PC across function entry code. */
10218 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10219
10220 /* Detect whether PC is at a point where the stack has been destroyed. */
10221 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10222
10223 /* Skip trampolines. */
10224 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10225
10226 /* The stack grows downward. */
10227 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10228
10229 /* Breakpoint manipulation. */
10230 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10231 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10232 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10233 arm_breakpoint_kind_from_current_state);
10234
10235 /* Information about registers, etc. */
10236 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10237 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10238 set_gdbarch_num_regs (gdbarch, register_count);
10239 set_gdbarch_register_type (gdbarch, arm_register_type);
10240 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10241
10242 /* This "info float" is FPA-specific. Use the generic version if we
10243 do not have FPA. */
10244 if (tdep->have_fpa_registers)
10245 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10246
10247 /* Internal <-> external register number maps. */
10248 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10249 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10250
10251 set_gdbarch_register_name (gdbarch, arm_register_name);
10252
10253 /* Returning results. */
10254 set_gdbarch_return_value (gdbarch, arm_return_value);
10255
10256 /* Disassembly. */
10257 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10258
10259 /* Minsymbol frobbing. */
10260 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10261 set_gdbarch_coff_make_msymbol_special (gdbarch,
10262 arm_coff_make_msymbol_special);
10263 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10264
10265 /* Thumb-2 IT block support. */
10266 set_gdbarch_adjust_breakpoint_address (gdbarch,
10267 arm_adjust_breakpoint_address);
10268
10269 /* Virtual tables. */
10270 set_gdbarch_vbit_in_delta (gdbarch, 1);
10271
10272 /* Hook in the ABI-specific overrides, if they have been registered. */
10273 gdbarch_init_osabi (info, gdbarch);
10274
10275 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10276
10277 /* Add some default predicates. */
10278 if (is_m)
10279 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10280 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10281 dwarf2_append_unwinders (gdbarch);
10282 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10283 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10284 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10285
10286 /* Now we have tuned the configuration, set a few final things,
10287 based on what the OS ABI has told us. */
10288
10289 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10290 binaries are always marked. */
10291 if (tdep->arm_abi == ARM_ABI_AUTO)
10292 tdep->arm_abi = ARM_ABI_APCS;
10293
10294 /* Watchpoints are not steppable. */
10295 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10296
10297 /* We used to default to FPA for generic ARM, but almost nobody
10298 uses that now, and we now provide a way for the user to force
10299 the model. So default to the most useful variant. */
10300 if (tdep->fp_model == ARM_FLOAT_AUTO)
10301 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10302
10303 if (tdep->jb_pc >= 0)
10304 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10305
10306 /* Floating point sizes and format. */
10307 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10308 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10309 {
10310 set_gdbarch_double_format
10311 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10312 set_gdbarch_long_double_format
10313 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10314 }
10315 else
10316 {
10317 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10318 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10319 }
10320
10321 /* Hook used to decorate frames with signed return addresses, only available
10322 for ARMv8.1-m PACBTI. */
10323 if (is_m && have_pacbti)
10324 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10325
10326 if (tdesc_data != nullptr)
10327 {
10328 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10329
10330 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10331 register_count = gdbarch_num_regs (gdbarch);
10332
10333 /* Override tdesc_register_type to adjust the types of VFP
10334 registers for NEON. */
10335 set_gdbarch_register_type (gdbarch, arm_register_type);
10336 }
10337
10338 /* Initialize the pseudo register data. */
10339 int num_pseudos = 0;
10340 if (tdep->have_s_pseudos)
10341 {
10342 /* VFP single precision pseudo registers (S0~S31). */
10343 tdep->s_pseudo_base = register_count;
10344 tdep->s_pseudo_count = 32;
10345 num_pseudos += tdep->s_pseudo_count;
10346
10347 if (tdep->have_q_pseudos)
10348 {
10349 /* NEON quad precision pseudo registers (Q0~Q15). */
10350 tdep->q_pseudo_base = register_count + num_pseudos;
10351
10352 if (have_neon)
10353 tdep->q_pseudo_count = 16;
10354 else if (have_mve)
10355 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10356
10357 num_pseudos += tdep->q_pseudo_count;
10358 }
10359 }
10360
10361 /* Do we have any MVE pseudo registers? */
10362 if (have_mve)
10363 {
10364 tdep->mve_pseudo_base = register_count + num_pseudos;
10365 tdep->mve_pseudo_count = 1;
10366 num_pseudos += tdep->mve_pseudo_count;
10367 }
10368
10369 /* Do we have any ARMv8.1-m PACBTI pseudo registers. */
10370 if (have_pacbti)
10371 {
10372 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10373 tdep->pacbti_pseudo_count = 1;
10374 num_pseudos += tdep->pacbti_pseudo_count;
10375 }
10376
10377 /* Set some pseudo register hooks, if we have pseudo registers. */
10378 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10379 {
10380 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10381 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10382 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10383 }
10384
10385 /* Add standard register aliases. We add aliases even for those
10386 names which are used by the current architecture - it's simpler,
10387 and does no harm, since nothing ever lists user registers. */
10388 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10389 user_reg_add (gdbarch, arm_register_aliases[i].name,
10390 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10391
10392 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10393 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10394
10395 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10396
10397 return gdbarch;
10398 }
10399
10400 static void
10401 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10402 {
10403 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
10404
10405 if (tdep == NULL)
10406 return;
10407
10408 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10409 (int) tdep->fp_model);
10410 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10411 (int) tdep->have_fpa_registers);
10412 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10413 (int) tdep->have_wmmx_registers);
10414 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10415 (int) tdep->vfp_register_count);
10416 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10417 tdep->have_s_pseudos? "true" : "false");
10418 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10419 (int) tdep->s_pseudo_base);
10420 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10421 (int) tdep->s_pseudo_count);
10422 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10423 tdep->have_q_pseudos? "true" : "false");
10424 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10425 (int) tdep->q_pseudo_base);
10426 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10427 (int) tdep->q_pseudo_count);
10428 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10429 (int) tdep->have_neon);
10430 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10431 tdep->have_mve? "yes" : "no");
10432 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10433 tdep->mve_vpr_regnum);
10434 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10435 tdep->mve_pseudo_base);
10436 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10437 tdep->mve_pseudo_count);
10438 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10439 tdep->m_profile_msp_regnum);
10440 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10441 tdep->m_profile_psp_regnum);
10442 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10443 tdep->m_profile_msp_ns_regnum);
10444 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10445 tdep->m_profile_psp_ns_regnum);
10446 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10447 tdep->m_profile_msp_s_regnum);
10448 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10449 tdep->m_profile_psp_s_regnum);
10450 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10451 (unsigned long) tdep->lowest_pc);
10452 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10453 tdep->have_pacbti? "yes" : "no");
10454 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10455 tdep->pacbti_pseudo_base);
10456 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10457 tdep->pacbti_pseudo_count);
10458 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10459 tdep->is_m? "yes" : "no");
10460 }
10461
10462 #if GDB_SELF_TEST
10463 namespace selftests
10464 {
10465 static void arm_record_test (void);
10466 static void arm_analyze_prologue_test ();
10467 }
10468 #endif
10469
10470 void _initialize_arm_tdep ();
10471 void
10472 _initialize_arm_tdep ()
10473 {
10474 long length;
10475 int i, j;
10476 char regdesc[1024], *rdptr = regdesc;
10477 size_t rest = sizeof (regdesc);
10478
10479 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10480
10481 /* Add ourselves to objfile event chain. */
10482 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10483
10484 /* Register an ELF OS ABI sniffer for ARM binaries. */
10485 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10486 bfd_target_elf_flavour,
10487 arm_elf_osabi_sniffer);
10488
10489 /* Add root prefix command for all "set arm"/"show arm" commands. */
10490 add_setshow_prefix_cmd ("arm", no_class,
10491 _("Various ARM-specific commands."),
10492 _("Various ARM-specific commands."),
10493 &setarmcmdlist, &showarmcmdlist,
10494 &setlist, &showlist);
10495
10496 arm_disassembler_options = xstrdup ("reg-names-std");
10497 const disasm_options_t *disasm_options
10498 = &disassembler_options_arm ()->options;
10499 int num_disassembly_styles = 0;
10500 for (i = 0; disasm_options->name[i] != NULL; i++)
10501 if (startswith (disasm_options->name[i], "reg-names-"))
10502 num_disassembly_styles++;
10503
10504 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
10505 valid_disassembly_styles = XNEWVEC (const char *,
10506 num_disassembly_styles + 1);
10507 for (i = j = 0; disasm_options->name[i] != NULL; i++)
10508 if (startswith (disasm_options->name[i], "reg-names-"))
10509 {
10510 size_t offset = strlen ("reg-names-");
10511 const char *style = disasm_options->name[i];
10512 valid_disassembly_styles[j++] = &style[offset];
10513 if (strcmp (&style[offset], "std") == 0)
10514 disassembly_style = &style[offset];
10515 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
10516 disasm_options->description[i]);
10517 rdptr += length;
10518 rest -= length;
10519 }
10520 /* Mark the end of valid options. */
10521 valid_disassembly_styles[num_disassembly_styles] = NULL;
10522
10523 /* Create the help text. */
10524 std::string helptext = string_printf ("%s%s%s",
10525 _("The valid values are:\n"),
10526 regdesc,
10527 _("The default is \"std\"."));
10528
10529 add_setshow_enum_cmd("disassembler", no_class,
10530 valid_disassembly_styles, &disassembly_style,
10531 _("Set the disassembly style."),
10532 _("Show the disassembly style."),
10533 helptext.c_str (),
10534 set_disassembly_style_sfunc,
10535 show_disassembly_style_sfunc,
10536 &setarmcmdlist, &showarmcmdlist);
10537
10538 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10539 _("Set usage of ARM 32-bit mode."),
10540 _("Show usage of ARM 32-bit mode."),
10541 _("When off, a 26-bit PC will be used."),
10542 NULL,
10543 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10544 mode is %s. */
10545 &setarmcmdlist, &showarmcmdlist);
10546
10547 /* Add a command to allow the user to force the FPU model. */
10548 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10549 _("Set the floating point type."),
10550 _("Show the floating point type."),
10551 _("auto - Determine the FP typefrom the OS-ABI.\n\
10552 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10553 fpa - FPA co-processor (GCC compiled).\n\
10554 softvfp - Software FP with pure-endian doubles.\n\
10555 vfp - VFP co-processor."),
10556 set_fp_model_sfunc, show_fp_model,
10557 &setarmcmdlist, &showarmcmdlist);
10558
10559 /* Add a command to allow the user to force the ABI. */
10560 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10561 _("Set the ABI."),
10562 _("Show the ABI."),
10563 NULL, arm_set_abi, arm_show_abi,
10564 &setarmcmdlist, &showarmcmdlist);
10565
10566 /* Add two commands to allow the user to force the assumed
10567 execution mode. */
10568 add_setshow_enum_cmd ("fallback-mode", class_support,
10569 arm_mode_strings, &arm_fallback_mode_string,
10570 _("Set the mode assumed when symbols are unavailable."),
10571 _("Show the mode assumed when symbols are unavailable."),
10572 NULL, NULL, arm_show_fallback_mode,
10573 &setarmcmdlist, &showarmcmdlist);
10574 add_setshow_enum_cmd ("force-mode", class_support,
10575 arm_mode_strings, &arm_force_mode_string,
10576 _("Set the mode assumed even when symbols are available."),
10577 _("Show the mode assumed even when symbols are available."),
10578 NULL, NULL, arm_show_force_mode,
10579 &setarmcmdlist, &showarmcmdlist);
10580
10581 /* Add a command to stop triggering security exceptions when
10582 unwinding exception stacks. */
10583 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
10584 _("Set usage of non-secure to secure exception stack unwinding."),
10585 _("Show usage of non-secure to secure exception stack unwinding."),
10586 _("When on, the debugger can trigger memory access traps."),
10587 NULL, arm_show_unwind_secure_frames,
10588 &setarmcmdlist, &showarmcmdlist);
10589
10590 /* Debugging flag. */
10591 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10592 _("Set ARM debugging."),
10593 _("Show ARM debugging."),
10594 _("When on, arm-specific debugging is enabled."),
10595 NULL,
10596 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10597 &setdebuglist, &showdebuglist);
10598
10599 #if GDB_SELF_TEST
10600 selftests::register_test ("arm-record", selftests::arm_record_test);
10601 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
10602 #endif
10603
10604 }
10605
10606 /* ARM-reversible process record data structures. */
10607
10608 #define ARM_INSN_SIZE_BYTES 4
10609 #define THUMB_INSN_SIZE_BYTES 2
10610 #define THUMB2_INSN_SIZE_BYTES 4
10611
10612
10613 /* Position of the bit within a 32-bit ARM instruction
10614 that defines whether the instruction is a load or store. */
10615 #define INSN_S_L_BIT_NUM 20
10616
10617 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10618 do \
10619 { \
10620 unsigned int reg_len = LENGTH; \
10621 if (reg_len) \
10622 { \
10623 REGS = XNEWVEC (uint32_t, reg_len); \
10624 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10625 } \
10626 } \
10627 while (0)
10628
10629 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10630 do \
10631 { \
10632 unsigned int mem_len = LENGTH; \
10633 if (mem_len) \
10634 { \
10635 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10636 memcpy(&MEMS->len, &RECORD_BUF[0], \
10637 sizeof(struct arm_mem_r) * LENGTH); \
10638 } \
10639 } \
10640 while (0)
10641
10642 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10643 #define INSN_RECORDED(ARM_RECORD) \
10644 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10645
10646 /* ARM memory record structure. */
10647 struct arm_mem_r
10648 {
10649 uint32_t len; /* Record length. */
10650 uint32_t addr; /* Memory address. */
10651 };
10652
10653 /* ARM instruction record contains opcode of current insn
10654 and execution state (before entry to decode_insn()),
10655 contains list of to-be-modified registers and
10656 memory blocks (on return from decode_insn()). */
10657
10658 typedef struct insn_decode_record_t
10659 {
10660 struct gdbarch *gdbarch;
10661 struct regcache *regcache;
10662 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10663 uint32_t arm_insn; /* Should accommodate thumb. */
10664 uint32_t cond; /* Condition code. */
10665 uint32_t opcode; /* Insn opcode. */
10666 uint32_t decode; /* Insn decode bits. */
10667 uint32_t mem_rec_count; /* No of mem records. */
10668 uint32_t reg_rec_count; /* No of reg records. */
10669 uint32_t *arm_regs; /* Registers to be saved for this record. */
10670 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10671 } insn_decode_record;
10672
10673
10674 /* Checks ARM SBZ and SBO mandatory fields. */
10675
10676 static int
10677 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10678 {
10679 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10680
10681 if (!len)
10682 return 1;
10683
10684 if (!sbo)
10685 ones = ~ones;
10686
10687 while (ones)
10688 {
10689 if (!(ones & sbo))
10690 {
10691 return 0;
10692 }
10693 ones = ones >> 1;
10694 }
10695 return 1;
10696 }
10697
10698 enum arm_record_result
10699 {
10700 ARM_RECORD_SUCCESS = 0,
10701 ARM_RECORD_FAILURE = 1
10702 };
10703
10704 typedef enum
10705 {
10706 ARM_RECORD_STRH=1,
10707 ARM_RECORD_STRD
10708 } arm_record_strx_t;
10709
10710 typedef enum
10711 {
10712 ARM_RECORD=1,
10713 THUMB_RECORD,
10714 THUMB2_RECORD
10715 } record_type_t;
10716
10717
10718 static int
10719 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10720 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10721 {
10722
10723 struct regcache *reg_cache = arm_insn_r->regcache;
10724 ULONGEST u_regval[2]= {0};
10725
10726 uint32_t reg_src1 = 0, reg_src2 = 0;
10727 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10728
10729 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10730 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10731
10732 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10733 {
10734 /* 1) Handle misc store, immediate offset. */
10735 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10736 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10737 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10738 regcache_raw_read_unsigned (reg_cache, reg_src1,
10739 &u_regval[0]);
10740 if (ARM_PC_REGNUM == reg_src1)
10741 {
10742 /* If R15 was used as Rn, hence current PC+8. */
10743 u_regval[0] = u_regval[0] + 8;
10744 }
10745 offset_8 = (immed_high << 4) | immed_low;
10746 /* Calculate target store address. */
10747 if (14 == arm_insn_r->opcode)
10748 {
10749 tgt_mem_addr = u_regval[0] + offset_8;
10750 }
10751 else
10752 {
10753 tgt_mem_addr = u_regval[0] - offset_8;
10754 }
10755 if (ARM_RECORD_STRH == str_type)
10756 {
10757 record_buf_mem[0] = 2;
10758 record_buf_mem[1] = tgt_mem_addr;
10759 arm_insn_r->mem_rec_count = 1;
10760 }
10761 else if (ARM_RECORD_STRD == str_type)
10762 {
10763 record_buf_mem[0] = 4;
10764 record_buf_mem[1] = tgt_mem_addr;
10765 record_buf_mem[2] = 4;
10766 record_buf_mem[3] = tgt_mem_addr + 4;
10767 arm_insn_r->mem_rec_count = 2;
10768 }
10769 }
10770 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10771 {
10772 /* 2) Store, register offset. */
10773 /* Get Rm. */
10774 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10775 /* Get Rn. */
10776 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10777 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10778 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10779 if (15 == reg_src2)
10780 {
10781 /* If R15 was used as Rn, hence current PC+8. */
10782 u_regval[0] = u_regval[0] + 8;
10783 }
10784 /* Calculate target store address, Rn +/- Rm, register offset. */
10785 if (12 == arm_insn_r->opcode)
10786 {
10787 tgt_mem_addr = u_regval[0] + u_regval[1];
10788 }
10789 else
10790 {
10791 tgt_mem_addr = u_regval[1] - u_regval[0];
10792 }
10793 if (ARM_RECORD_STRH == str_type)
10794 {
10795 record_buf_mem[0] = 2;
10796 record_buf_mem[1] = tgt_mem_addr;
10797 arm_insn_r->mem_rec_count = 1;
10798 }
10799 else if (ARM_RECORD_STRD == str_type)
10800 {
10801 record_buf_mem[0] = 4;
10802 record_buf_mem[1] = tgt_mem_addr;
10803 record_buf_mem[2] = 4;
10804 record_buf_mem[3] = tgt_mem_addr + 4;
10805 arm_insn_r->mem_rec_count = 2;
10806 }
10807 }
10808 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10809 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10810 {
10811 /* 3) Store, immediate pre-indexed. */
10812 /* 5) Store, immediate post-indexed. */
10813 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10814 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10815 offset_8 = (immed_high << 4) | immed_low;
10816 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10817 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10818 /* Calculate target store address, Rn +/- Rm, register offset. */
10819 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10820 {
10821 tgt_mem_addr = u_regval[0] + offset_8;
10822 }
10823 else
10824 {
10825 tgt_mem_addr = u_regval[0] - offset_8;
10826 }
10827 if (ARM_RECORD_STRH == str_type)
10828 {
10829 record_buf_mem[0] = 2;
10830 record_buf_mem[1] = tgt_mem_addr;
10831 arm_insn_r->mem_rec_count = 1;
10832 }
10833 else if (ARM_RECORD_STRD == str_type)
10834 {
10835 record_buf_mem[0] = 4;
10836 record_buf_mem[1] = tgt_mem_addr;
10837 record_buf_mem[2] = 4;
10838 record_buf_mem[3] = tgt_mem_addr + 4;
10839 arm_insn_r->mem_rec_count = 2;
10840 }
10841 /* Record Rn also as it changes. */
10842 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10843 arm_insn_r->reg_rec_count = 1;
10844 }
10845 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10846 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10847 {
10848 /* 4) Store, register pre-indexed. */
10849 /* 6) Store, register post -indexed. */
10850 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10851 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10852 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10853 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10854 /* Calculate target store address, Rn +/- Rm, register offset. */
10855 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10856 {
10857 tgt_mem_addr = u_regval[0] + u_regval[1];
10858 }
10859 else
10860 {
10861 tgt_mem_addr = u_regval[1] - u_regval[0];
10862 }
10863 if (ARM_RECORD_STRH == str_type)
10864 {
10865 record_buf_mem[0] = 2;
10866 record_buf_mem[1] = tgt_mem_addr;
10867 arm_insn_r->mem_rec_count = 1;
10868 }
10869 else if (ARM_RECORD_STRD == str_type)
10870 {
10871 record_buf_mem[0] = 4;
10872 record_buf_mem[1] = tgt_mem_addr;
10873 record_buf_mem[2] = 4;
10874 record_buf_mem[3] = tgt_mem_addr + 4;
10875 arm_insn_r->mem_rec_count = 2;
10876 }
10877 /* Record Rn also as it changes. */
10878 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10879 arm_insn_r->reg_rec_count = 1;
10880 }
10881 return 0;
10882 }
10883
10884 /* Handling ARM extension space insns. */
10885
10886 static int
10887 arm_record_extension_space (insn_decode_record *arm_insn_r)
10888 {
10889 int ret = 0; /* Return value: -1:record failure ; 0:success */
10890 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10891 uint32_t record_buf[8], record_buf_mem[8];
10892 uint32_t reg_src1 = 0;
10893 struct regcache *reg_cache = arm_insn_r->regcache;
10894 ULONGEST u_regval = 0;
10895
10896 gdb_assert (!INSN_RECORDED(arm_insn_r));
10897 /* Handle unconditional insn extension space. */
10898
10899 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10900 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10901 if (arm_insn_r->cond)
10902 {
10903 /* PLD has no affect on architectural state, it just affects
10904 the caches. */
10905 if (5 == ((opcode1 & 0xE0) >> 5))
10906 {
10907 /* BLX(1) */
10908 record_buf[0] = ARM_PS_REGNUM;
10909 record_buf[1] = ARM_LR_REGNUM;
10910 arm_insn_r->reg_rec_count = 2;
10911 }
10912 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10913 }
10914
10915
10916 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10917 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10918 {
10919 ret = -1;
10920 /* Undefined instruction on ARM V5; need to handle if later
10921 versions define it. */
10922 }
10923
10924 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10925 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10926 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10927
10928 /* Handle arithmetic insn extension space. */
10929 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10930 && !INSN_RECORDED(arm_insn_r))
10931 {
10932 /* Handle MLA(S) and MUL(S). */
10933 if (in_inclusive_range (insn_op1, 0U, 3U))
10934 {
10935 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10936 record_buf[1] = ARM_PS_REGNUM;
10937 arm_insn_r->reg_rec_count = 2;
10938 }
10939 else if (in_inclusive_range (insn_op1, 4U, 15U))
10940 {
10941 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10942 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10943 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10944 record_buf[2] = ARM_PS_REGNUM;
10945 arm_insn_r->reg_rec_count = 3;
10946 }
10947 }
10948
10949 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10950 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10951 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10952
10953 /* Handle control insn extension space. */
10954
10955 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10956 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10957 {
10958 if (!bit (arm_insn_r->arm_insn,25))
10959 {
10960 if (!bits (arm_insn_r->arm_insn, 4, 7))
10961 {
10962 if ((0 == insn_op1) || (2 == insn_op1))
10963 {
10964 /* MRS. */
10965 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10966 arm_insn_r->reg_rec_count = 1;
10967 }
10968 else if (1 == insn_op1)
10969 {
10970 /* CSPR is going to be changed. */
10971 record_buf[0] = ARM_PS_REGNUM;
10972 arm_insn_r->reg_rec_count = 1;
10973 }
10974 else if (3 == insn_op1)
10975 {
10976 /* SPSR is going to be changed. */
10977 /* We need to get SPSR value, which is yet to be done. */
10978 return -1;
10979 }
10980 }
10981 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10982 {
10983 if (1 == insn_op1)
10984 {
10985 /* BX. */
10986 record_buf[0] = ARM_PS_REGNUM;
10987 arm_insn_r->reg_rec_count = 1;
10988 }
10989 else if (3 == insn_op1)
10990 {
10991 /* CLZ. */
10992 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10993 arm_insn_r->reg_rec_count = 1;
10994 }
10995 }
10996 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10997 {
10998 /* BLX. */
10999 record_buf[0] = ARM_PS_REGNUM;
11000 record_buf[1] = ARM_LR_REGNUM;
11001 arm_insn_r->reg_rec_count = 2;
11002 }
11003 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11004 {
11005 /* QADD, QSUB, QDADD, QDSUB */
11006 record_buf[0] = ARM_PS_REGNUM;
11007 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11008 arm_insn_r->reg_rec_count = 2;
11009 }
11010 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11011 {
11012 /* BKPT. */
11013 record_buf[0] = ARM_PS_REGNUM;
11014 record_buf[1] = ARM_LR_REGNUM;
11015 arm_insn_r->reg_rec_count = 2;
11016
11017 /* Save SPSR also;how? */
11018 return -1;
11019 }
11020 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11021 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11022 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11023 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11024 )
11025 {
11026 if (0 == insn_op1 || 1 == insn_op1)
11027 {
11028 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11029 /* We dont do optimization for SMULW<y> where we
11030 need only Rd. */
11031 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11032 record_buf[1] = ARM_PS_REGNUM;
11033 arm_insn_r->reg_rec_count = 2;
11034 }
11035 else if (2 == insn_op1)
11036 {
11037 /* SMLAL<x><y>. */
11038 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11039 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11040 arm_insn_r->reg_rec_count = 2;
11041 }
11042 else if (3 == insn_op1)
11043 {
11044 /* SMUL<x><y>. */
11045 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11046 arm_insn_r->reg_rec_count = 1;
11047 }
11048 }
11049 }
11050 else
11051 {
11052 /* MSR : immediate form. */
11053 if (1 == insn_op1)
11054 {
11055 /* CSPR is going to be changed. */
11056 record_buf[0] = ARM_PS_REGNUM;
11057 arm_insn_r->reg_rec_count = 1;
11058 }
11059 else if (3 == insn_op1)
11060 {
11061 /* SPSR is going to be changed. */
11062 /* we need to get SPSR value, which is yet to be done */
11063 return -1;
11064 }
11065 }
11066 }
11067
11068 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11069 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11070 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11071
11072 /* Handle load/store insn extension space. */
11073
11074 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11075 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11076 && !INSN_RECORDED(arm_insn_r))
11077 {
11078 /* SWP/SWPB. */
11079 if (0 == insn_op1)
11080 {
11081 /* These insn, changes register and memory as well. */
11082 /* SWP or SWPB insn. */
11083 /* Get memory address given by Rn. */
11084 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11085 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11086 /* SWP insn ?, swaps word. */
11087 if (8 == arm_insn_r->opcode)
11088 {
11089 record_buf_mem[0] = 4;
11090 }
11091 else
11092 {
11093 /* SWPB insn, swaps only byte. */
11094 record_buf_mem[0] = 1;
11095 }
11096 record_buf_mem[1] = u_regval;
11097 arm_insn_r->mem_rec_count = 1;
11098 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11099 arm_insn_r->reg_rec_count = 1;
11100 }
11101 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11102 {
11103 /* STRH. */
11104 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11105 ARM_RECORD_STRH);
11106 }
11107 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11108 {
11109 /* LDRD. */
11110 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11111 record_buf[1] = record_buf[0] + 1;
11112 arm_insn_r->reg_rec_count = 2;
11113 }
11114 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11115 {
11116 /* STRD. */
11117 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11118 ARM_RECORD_STRD);
11119 }
11120 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11121 {
11122 /* LDRH, LDRSB, LDRSH. */
11123 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11124 arm_insn_r->reg_rec_count = 1;
11125 }
11126
11127 }
11128
11129 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11130 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11131 && !INSN_RECORDED(arm_insn_r))
11132 {
11133 ret = -1;
11134 /* Handle coprocessor insn extension space. */
11135 }
11136
11137 /* To be done for ARMv5 and later; as of now we return -1. */
11138 if (-1 == ret)
11139 return ret;
11140
11141 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11142 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11143
11144 return ret;
11145 }
11146
11147 /* Handling opcode 000 insns. */
11148
11149 static int
11150 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11151 {
11152 struct regcache *reg_cache = arm_insn_r->regcache;
11153 uint32_t record_buf[8], record_buf_mem[8];
11154 ULONGEST u_regval[2] = {0};
11155
11156 uint32_t reg_src1 = 0;
11157 uint32_t opcode1 = 0;
11158
11159 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11160 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11161 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11162
11163 if (!((opcode1 & 0x19) == 0x10))
11164 {
11165 /* Data-processing (register) and Data-processing (register-shifted
11166 register */
11167 /* Out of 11 shifter operands mode, all the insn modifies destination
11168 register, which is specified by 13-16 decode. */
11169 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11170 record_buf[1] = ARM_PS_REGNUM;
11171 arm_insn_r->reg_rec_count = 2;
11172 }
11173 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11174 {
11175 /* Miscellaneous instructions */
11176
11177 if (3 == arm_insn_r->decode && 0x12 == opcode1
11178 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11179 {
11180 /* Handle BLX, branch and link/exchange. */
11181 if (9 == arm_insn_r->opcode)
11182 {
11183 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11184 and R14 stores the return address. */
11185 record_buf[0] = ARM_PS_REGNUM;
11186 record_buf[1] = ARM_LR_REGNUM;
11187 arm_insn_r->reg_rec_count = 2;
11188 }
11189 }
11190 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11191 {
11192 /* Handle enhanced software breakpoint insn, BKPT. */
11193 /* CPSR is changed to be executed in ARM state, disabling normal
11194 interrupts, entering abort mode. */
11195 /* According to high vector configuration PC is set. */
11196 /* user hit breakpoint and type reverse, in
11197 that case, we need to go back with previous CPSR and
11198 Program Counter. */
11199 record_buf[0] = ARM_PS_REGNUM;
11200 record_buf[1] = ARM_LR_REGNUM;
11201 arm_insn_r->reg_rec_count = 2;
11202
11203 /* Save SPSR also; how? */
11204 return -1;
11205 }
11206 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11207 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11208 {
11209 /* Handle BX, branch and link/exchange. */
11210 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11211 record_buf[0] = ARM_PS_REGNUM;
11212 arm_insn_r->reg_rec_count = 1;
11213 }
11214 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11215 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11216 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11217 {
11218 /* Count leading zeros: CLZ. */
11219 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11220 arm_insn_r->reg_rec_count = 1;
11221 }
11222 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11223 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11224 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11225 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11226 {
11227 /* Handle MRS insn. */
11228 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11229 arm_insn_r->reg_rec_count = 1;
11230 }
11231 }
11232 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11233 {
11234 /* Multiply and multiply-accumulate */
11235
11236 /* Handle multiply instructions. */
11237 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11238 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11239 {
11240 /* Handle MLA and MUL. */
11241 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11242 record_buf[1] = ARM_PS_REGNUM;
11243 arm_insn_r->reg_rec_count = 2;
11244 }
11245 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11246 {
11247 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11248 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11249 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11250 record_buf[2] = ARM_PS_REGNUM;
11251 arm_insn_r->reg_rec_count = 3;
11252 }
11253 }
11254 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11255 {
11256 /* Synchronization primitives */
11257
11258 /* Handling SWP, SWPB. */
11259 /* These insn, changes register and memory as well. */
11260 /* SWP or SWPB insn. */
11261
11262 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11263 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11264 /* SWP insn ?, swaps word. */
11265 if (8 == arm_insn_r->opcode)
11266 {
11267 record_buf_mem[0] = 4;
11268 }
11269 else
11270 {
11271 /* SWPB insn, swaps only byte. */
11272 record_buf_mem[0] = 1;
11273 }
11274 record_buf_mem[1] = u_regval[0];
11275 arm_insn_r->mem_rec_count = 1;
11276 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11277 arm_insn_r->reg_rec_count = 1;
11278 }
11279 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11280 || 15 == arm_insn_r->decode)
11281 {
11282 if ((opcode1 & 0x12) == 2)
11283 {
11284 /* Extra load/store (unprivileged) */
11285 return -1;
11286 }
11287 else
11288 {
11289 /* Extra load/store */
11290 switch (bits (arm_insn_r->arm_insn, 5, 6))
11291 {
11292 case 1:
11293 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11294 {
11295 /* STRH (register), STRH (immediate) */
11296 arm_record_strx (arm_insn_r, &record_buf[0],
11297 &record_buf_mem[0], ARM_RECORD_STRH);
11298 }
11299 else if ((opcode1 & 0x05) == 0x1)
11300 {
11301 /* LDRH (register) */
11302 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11303 arm_insn_r->reg_rec_count = 1;
11304
11305 if (bit (arm_insn_r->arm_insn, 21))
11306 {
11307 /* Write back to Rn. */
11308 record_buf[arm_insn_r->reg_rec_count++]
11309 = bits (arm_insn_r->arm_insn, 16, 19);
11310 }
11311 }
11312 else if ((opcode1 & 0x05) == 0x5)
11313 {
11314 /* LDRH (immediate), LDRH (literal) */
11315 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11316
11317 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11318 arm_insn_r->reg_rec_count = 1;
11319
11320 if (rn != 15)
11321 {
11322 /*LDRH (immediate) */
11323 if (bit (arm_insn_r->arm_insn, 21))
11324 {
11325 /* Write back to Rn. */
11326 record_buf[arm_insn_r->reg_rec_count++] = rn;
11327 }
11328 }
11329 }
11330 else
11331 return -1;
11332 break;
11333 case 2:
11334 if ((opcode1 & 0x05) == 0x0)
11335 {
11336 /* LDRD (register) */
11337 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11338 record_buf[1] = record_buf[0] + 1;
11339 arm_insn_r->reg_rec_count = 2;
11340
11341 if (bit (arm_insn_r->arm_insn, 21))
11342 {
11343 /* Write back to Rn. */
11344 record_buf[arm_insn_r->reg_rec_count++]
11345 = bits (arm_insn_r->arm_insn, 16, 19);
11346 }
11347 }
11348 else if ((opcode1 & 0x05) == 0x1)
11349 {
11350 /* LDRSB (register) */
11351 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11352 arm_insn_r->reg_rec_count = 1;
11353
11354 if (bit (arm_insn_r->arm_insn, 21))
11355 {
11356 /* Write back to Rn. */
11357 record_buf[arm_insn_r->reg_rec_count++]
11358 = bits (arm_insn_r->arm_insn, 16, 19);
11359 }
11360 }
11361 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11362 {
11363 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11364 LDRSB (literal) */
11365 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11366
11367 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11368 arm_insn_r->reg_rec_count = 1;
11369
11370 if (rn != 15)
11371 {
11372 /*LDRD (immediate), LDRSB (immediate) */
11373 if (bit (arm_insn_r->arm_insn, 21))
11374 {
11375 /* Write back to Rn. */
11376 record_buf[arm_insn_r->reg_rec_count++] = rn;
11377 }
11378 }
11379 }
11380 else
11381 return -1;
11382 break;
11383 case 3:
11384 if ((opcode1 & 0x05) == 0x0)
11385 {
11386 /* STRD (register) */
11387 arm_record_strx (arm_insn_r, &record_buf[0],
11388 &record_buf_mem[0], ARM_RECORD_STRD);
11389 }
11390 else if ((opcode1 & 0x05) == 0x1)
11391 {
11392 /* LDRSH (register) */
11393 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11394 arm_insn_r->reg_rec_count = 1;
11395
11396 if (bit (arm_insn_r->arm_insn, 21))
11397 {
11398 /* Write back to Rn. */
11399 record_buf[arm_insn_r->reg_rec_count++]
11400 = bits (arm_insn_r->arm_insn, 16, 19);
11401 }
11402 }
11403 else if ((opcode1 & 0x05) == 0x4)
11404 {
11405 /* STRD (immediate) */
11406 arm_record_strx (arm_insn_r, &record_buf[0],
11407 &record_buf_mem[0], ARM_RECORD_STRD);
11408 }
11409 else if ((opcode1 & 0x05) == 0x5)
11410 {
11411 /* LDRSH (immediate), LDRSH (literal) */
11412 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11413 arm_insn_r->reg_rec_count = 1;
11414
11415 if (bit (arm_insn_r->arm_insn, 21))
11416 {
11417 /* Write back to Rn. */
11418 record_buf[arm_insn_r->reg_rec_count++]
11419 = bits (arm_insn_r->arm_insn, 16, 19);
11420 }
11421 }
11422 else
11423 return -1;
11424 break;
11425 default:
11426 return -1;
11427 }
11428 }
11429 }
11430 else
11431 {
11432 return -1;
11433 }
11434
11435 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11436 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11437 return 0;
11438 }
11439
11440 /* Handling opcode 001 insns. */
11441
11442 static int
11443 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11444 {
11445 uint32_t record_buf[8], record_buf_mem[8];
11446
11447 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11448 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11449
11450 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11451 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11452 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11453 )
11454 {
11455 /* Handle MSR insn. */
11456 if (9 == arm_insn_r->opcode)
11457 {
11458 /* CSPR is going to be changed. */
11459 record_buf[0] = ARM_PS_REGNUM;
11460 arm_insn_r->reg_rec_count = 1;
11461 }
11462 else
11463 {
11464 /* SPSR is going to be changed. */
11465 }
11466 }
11467 else if (arm_insn_r->opcode <= 15)
11468 {
11469 /* Normal data processing insns. */
11470 /* Out of 11 shifter operands mode, all the insn modifies destination
11471 register, which is specified by 13-16 decode. */
11472 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11473 record_buf[1] = ARM_PS_REGNUM;
11474 arm_insn_r->reg_rec_count = 2;
11475 }
11476 else
11477 {
11478 return -1;
11479 }
11480
11481 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11482 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11483 return 0;
11484 }
11485
11486 static int
11487 arm_record_media (insn_decode_record *arm_insn_r)
11488 {
11489 uint32_t record_buf[8];
11490
11491 switch (bits (arm_insn_r->arm_insn, 22, 24))
11492 {
11493 case 0:
11494 /* Parallel addition and subtraction, signed */
11495 case 1:
11496 /* Parallel addition and subtraction, unsigned */
11497 case 2:
11498 case 3:
11499 /* Packing, unpacking, saturation and reversal */
11500 {
11501 int rd = bits (arm_insn_r->arm_insn, 12, 15);
11502
11503 record_buf[arm_insn_r->reg_rec_count++] = rd;
11504 }
11505 break;
11506
11507 case 4:
11508 case 5:
11509 /* Signed multiplies */
11510 {
11511 int rd = bits (arm_insn_r->arm_insn, 16, 19);
11512 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
11513
11514 record_buf[arm_insn_r->reg_rec_count++] = rd;
11515 if (op1 == 0x0)
11516 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11517 else if (op1 == 0x4)
11518 record_buf[arm_insn_r->reg_rec_count++]
11519 = bits (arm_insn_r->arm_insn, 12, 15);
11520 }
11521 break;
11522
11523 case 6:
11524 {
11525 if (bit (arm_insn_r->arm_insn, 21)
11526 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
11527 {
11528 /* SBFX */
11529 record_buf[arm_insn_r->reg_rec_count++]
11530 = bits (arm_insn_r->arm_insn, 12, 15);
11531 }
11532 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
11533 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
11534 {
11535 /* USAD8 and USADA8 */
11536 record_buf[arm_insn_r->reg_rec_count++]
11537 = bits (arm_insn_r->arm_insn, 16, 19);
11538 }
11539 }
11540 break;
11541
11542 case 7:
11543 {
11544 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
11545 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
11546 {
11547 /* Permanently UNDEFINED */
11548 return -1;
11549 }
11550 else
11551 {
11552 /* BFC, BFI and UBFX */
11553 record_buf[arm_insn_r->reg_rec_count++]
11554 = bits (arm_insn_r->arm_insn, 12, 15);
11555 }
11556 }
11557 break;
11558
11559 default:
11560 return -1;
11561 }
11562
11563 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11564
11565 return 0;
11566 }
11567
11568 /* Handle ARM mode instructions with opcode 010. */
11569
11570 static int
11571 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11572 {
11573 struct regcache *reg_cache = arm_insn_r->regcache;
11574
11575 uint32_t reg_base , reg_dest;
11576 uint32_t offset_12, tgt_mem_addr;
11577 uint32_t record_buf[8], record_buf_mem[8];
11578 unsigned char wback;
11579 ULONGEST u_regval;
11580
11581 /* Calculate wback. */
11582 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11583 || (bit (arm_insn_r->arm_insn, 21) == 1);
11584
11585 arm_insn_r->reg_rec_count = 0;
11586 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11587
11588 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11589 {
11590 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11591 and LDRT. */
11592
11593 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11594 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11595
11596 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11597 preceeds a LDR instruction having R15 as reg_base, it
11598 emulates a branch and link instruction, and hence we need to save
11599 CPSR and PC as well. */
11600 if (ARM_PC_REGNUM == reg_dest)
11601 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11602
11603 /* If wback is true, also save the base register, which is going to be
11604 written to. */
11605 if (wback)
11606 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11607 }
11608 else
11609 {
11610 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11611
11612 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11613 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11614
11615 /* Handle bit U. */
11616 if (bit (arm_insn_r->arm_insn, 23))
11617 {
11618 /* U == 1: Add the offset. */
11619 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11620 }
11621 else
11622 {
11623 /* U == 0: subtract the offset. */
11624 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11625 }
11626
11627 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11628 bytes. */
11629 if (bit (arm_insn_r->arm_insn, 22))
11630 {
11631 /* STRB and STRBT: 1 byte. */
11632 record_buf_mem[0] = 1;
11633 }
11634 else
11635 {
11636 /* STR and STRT: 4 bytes. */
11637 record_buf_mem[0] = 4;
11638 }
11639
11640 /* Handle bit P. */
11641 if (bit (arm_insn_r->arm_insn, 24))
11642 record_buf_mem[1] = tgt_mem_addr;
11643 else
11644 record_buf_mem[1] = (uint32_t) u_regval;
11645
11646 arm_insn_r->mem_rec_count = 1;
11647
11648 /* If wback is true, also save the base register, which is going to be
11649 written to. */
11650 if (wback)
11651 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11652 }
11653
11654 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11655 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11656 return 0;
11657 }
11658
11659 /* Handling opcode 011 insns. */
11660
11661 static int
11662 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11663 {
11664 struct regcache *reg_cache = arm_insn_r->regcache;
11665
11666 uint32_t shift_imm = 0;
11667 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11668 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11669 uint32_t record_buf[8], record_buf_mem[8];
11670
11671 LONGEST s_word;
11672 ULONGEST u_regval[2];
11673
11674 if (bit (arm_insn_r->arm_insn, 4))
11675 return arm_record_media (arm_insn_r);
11676
11677 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11678 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11679
11680 /* Handle enhanced store insns and LDRD DSP insn,
11681 order begins according to addressing modes for store insns
11682 STRH insn. */
11683
11684 /* LDR or STR? */
11685 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11686 {
11687 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11688 /* LDR insn has a capability to do branching, if
11689 MOV LR, PC is preceded by LDR insn having Rn as R15
11690 in that case, it emulates branch and link insn, and hence we
11691 need to save CSPR and PC as well. */
11692 if (15 != reg_dest)
11693 {
11694 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11695 arm_insn_r->reg_rec_count = 1;
11696 }
11697 else
11698 {
11699 record_buf[0] = reg_dest;
11700 record_buf[1] = ARM_PS_REGNUM;
11701 arm_insn_r->reg_rec_count = 2;
11702 }
11703 }
11704 else
11705 {
11706 if (! bits (arm_insn_r->arm_insn, 4, 11))
11707 {
11708 /* Store insn, register offset and register pre-indexed,
11709 register post-indexed. */
11710 /* Get Rm. */
11711 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11712 /* Get Rn. */
11713 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11714 regcache_raw_read_unsigned (reg_cache, reg_src1
11715 , &u_regval[0]);
11716 regcache_raw_read_unsigned (reg_cache, reg_src2
11717 , &u_regval[1]);
11718 if (15 == reg_src2)
11719 {
11720 /* If R15 was used as Rn, hence current PC+8. */
11721 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11722 u_regval[0] = u_regval[0] + 8;
11723 }
11724 /* Calculate target store address, Rn +/- Rm, register offset. */
11725 /* U == 1. */
11726 if (bit (arm_insn_r->arm_insn, 23))
11727 {
11728 tgt_mem_addr = u_regval[0] + u_regval[1];
11729 }
11730 else
11731 {
11732 tgt_mem_addr = u_regval[1] - u_regval[0];
11733 }
11734
11735 switch (arm_insn_r->opcode)
11736 {
11737 /* STR. */
11738 case 8:
11739 case 12:
11740 /* STR. */
11741 case 9:
11742 case 13:
11743 /* STRT. */
11744 case 1:
11745 case 5:
11746 /* STR. */
11747 case 0:
11748 case 4:
11749 record_buf_mem[0] = 4;
11750 break;
11751
11752 /* STRB. */
11753 case 10:
11754 case 14:
11755 /* STRB. */
11756 case 11:
11757 case 15:
11758 /* STRBT. */
11759 case 3:
11760 case 7:
11761 /* STRB. */
11762 case 2:
11763 case 6:
11764 record_buf_mem[0] = 1;
11765 break;
11766
11767 default:
11768 gdb_assert_not_reached ("no decoding pattern found");
11769 break;
11770 }
11771 record_buf_mem[1] = tgt_mem_addr;
11772 arm_insn_r->mem_rec_count = 1;
11773
11774 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11775 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11776 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11777 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11778 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11779 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11780 )
11781 {
11782 /* Rn is going to be changed in pre-indexed mode and
11783 post-indexed mode as well. */
11784 record_buf[0] = reg_src2;
11785 arm_insn_r->reg_rec_count = 1;
11786 }
11787 }
11788 else
11789 {
11790 /* Store insn, scaled register offset; scaled pre-indexed. */
11791 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11792 /* Get Rm. */
11793 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11794 /* Get Rn. */
11795 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11796 /* Get shift_imm. */
11797 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11798 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11799 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11800 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11801 /* Offset_12 used as shift. */
11802 switch (offset_12)
11803 {
11804 case 0:
11805 /* Offset_12 used as index. */
11806 offset_12 = u_regval[0] << shift_imm;
11807 break;
11808
11809 case 1:
11810 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11811 break;
11812
11813 case 2:
11814 if (!shift_imm)
11815 {
11816 if (bit (u_regval[0], 31))
11817 {
11818 offset_12 = 0xFFFFFFFF;
11819 }
11820 else
11821 {
11822 offset_12 = 0;
11823 }
11824 }
11825 else
11826 {
11827 /* This is arithmetic shift. */
11828 offset_12 = s_word >> shift_imm;
11829 }
11830 break;
11831
11832 case 3:
11833 if (!shift_imm)
11834 {
11835 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11836 &u_regval[1]);
11837 /* Get C flag value and shift it by 31. */
11838 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11839 | (u_regval[0]) >> 1);
11840 }
11841 else
11842 {
11843 offset_12 = (u_regval[0] >> shift_imm) \
11844 | (u_regval[0] <<
11845 (sizeof(uint32_t) - shift_imm));
11846 }
11847 break;
11848
11849 default:
11850 gdb_assert_not_reached ("no decoding pattern found");
11851 break;
11852 }
11853
11854 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11855 /* bit U set. */
11856 if (bit (arm_insn_r->arm_insn, 23))
11857 {
11858 tgt_mem_addr = u_regval[1] + offset_12;
11859 }
11860 else
11861 {
11862 tgt_mem_addr = u_regval[1] - offset_12;
11863 }
11864
11865 switch (arm_insn_r->opcode)
11866 {
11867 /* STR. */
11868 case 8:
11869 case 12:
11870 /* STR. */
11871 case 9:
11872 case 13:
11873 /* STRT. */
11874 case 1:
11875 case 5:
11876 /* STR. */
11877 case 0:
11878 case 4:
11879 record_buf_mem[0] = 4;
11880 break;
11881
11882 /* STRB. */
11883 case 10:
11884 case 14:
11885 /* STRB. */
11886 case 11:
11887 case 15:
11888 /* STRBT. */
11889 case 3:
11890 case 7:
11891 /* STRB. */
11892 case 2:
11893 case 6:
11894 record_buf_mem[0] = 1;
11895 break;
11896
11897 default:
11898 gdb_assert_not_reached ("no decoding pattern found");
11899 break;
11900 }
11901 record_buf_mem[1] = tgt_mem_addr;
11902 arm_insn_r->mem_rec_count = 1;
11903
11904 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11905 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11906 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11907 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11908 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11909 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11910 )
11911 {
11912 /* Rn is going to be changed in register scaled pre-indexed
11913 mode,and scaled post indexed mode. */
11914 record_buf[0] = reg_src2;
11915 arm_insn_r->reg_rec_count = 1;
11916 }
11917 }
11918 }
11919
11920 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11921 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11922 return 0;
11923 }
11924
11925 /* Handle ARM mode instructions with opcode 100. */
11926
11927 static int
11928 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11929 {
11930 struct regcache *reg_cache = arm_insn_r->regcache;
11931 uint32_t register_count = 0, register_bits;
11932 uint32_t reg_base, addr_mode;
11933 uint32_t record_buf[24], record_buf_mem[48];
11934 uint32_t wback;
11935 ULONGEST u_regval;
11936
11937 /* Fetch the list of registers. */
11938 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11939 arm_insn_r->reg_rec_count = 0;
11940
11941 /* Fetch the base register that contains the address we are loading data
11942 to. */
11943 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11944
11945 /* Calculate wback. */
11946 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11947
11948 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11949 {
11950 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11951
11952 /* Find out which registers are going to be loaded from memory. */
11953 while (register_bits)
11954 {
11955 if (register_bits & 0x00000001)
11956 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11957 register_bits = register_bits >> 1;
11958 register_count++;
11959 }
11960
11961
11962 /* If wback is true, also save the base register, which is going to be
11963 written to. */
11964 if (wback)
11965 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11966
11967 /* Save the CPSR register. */
11968 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11969 }
11970 else
11971 {
11972 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11973
11974 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11975
11976 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11977
11978 /* Find out how many registers are going to be stored to memory. */
11979 while (register_bits)
11980 {
11981 if (register_bits & 0x00000001)
11982 register_count++;
11983 register_bits = register_bits >> 1;
11984 }
11985
11986 switch (addr_mode)
11987 {
11988 /* STMDA (STMED): Decrement after. */
11989 case 0:
11990 record_buf_mem[1] = (uint32_t) u_regval
11991 - register_count * ARM_INT_REGISTER_SIZE + 4;
11992 break;
11993 /* STM (STMIA, STMEA): Increment after. */
11994 case 1:
11995 record_buf_mem[1] = (uint32_t) u_regval;
11996 break;
11997 /* STMDB (STMFD): Decrement before. */
11998 case 2:
11999 record_buf_mem[1] = (uint32_t) u_regval
12000 - register_count * ARM_INT_REGISTER_SIZE;
12001 break;
12002 /* STMIB (STMFA): Increment before. */
12003 case 3:
12004 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12005 break;
12006 default:
12007 gdb_assert_not_reached ("no decoding pattern found");
12008 break;
12009 }
12010
12011 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12012 arm_insn_r->mem_rec_count = 1;
12013
12014 /* If wback is true, also save the base register, which is going to be
12015 written to. */
12016 if (wback)
12017 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12018 }
12019
12020 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12021 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12022 return 0;
12023 }
12024
12025 /* Handling opcode 101 insns. */
12026
12027 static int
12028 arm_record_b_bl (insn_decode_record *arm_insn_r)
12029 {
12030 uint32_t record_buf[8];
12031
12032 /* Handle B, BL, BLX(1) insns. */
12033 /* B simply branches so we do nothing here. */
12034 /* Note: BLX(1) doesnt fall here but instead it falls into
12035 extension space. */
12036 if (bit (arm_insn_r->arm_insn, 24))
12037 {
12038 record_buf[0] = ARM_LR_REGNUM;
12039 arm_insn_r->reg_rec_count = 1;
12040 }
12041
12042 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12043
12044 return 0;
12045 }
12046
12047 static int
12048 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
12049 {
12050 gdb_printf (gdb_stderr,
12051 _("Process record does not support instruction "
12052 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12053 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12054
12055 return -1;
12056 }
12057
12058 /* Record handler for vector data transfer instructions. */
12059
12060 static int
12061 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
12062 {
12063 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12064 uint32_t record_buf[4];
12065
12066 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12067 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12068 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12069 bit_l = bit (arm_insn_r->arm_insn, 20);
12070 bit_c = bit (arm_insn_r->arm_insn, 8);
12071
12072 /* Handle VMOV instruction. */
12073 if (bit_l && bit_c)
12074 {
12075 record_buf[0] = reg_t;
12076 arm_insn_r->reg_rec_count = 1;
12077 }
12078 else if (bit_l && !bit_c)
12079 {
12080 /* Handle VMOV instruction. */
12081 if (bits_a == 0x00)
12082 {
12083 record_buf[0] = reg_t;
12084 arm_insn_r->reg_rec_count = 1;
12085 }
12086 /* Handle VMRS instruction. */
12087 else if (bits_a == 0x07)
12088 {
12089 if (reg_t == 15)
12090 reg_t = ARM_PS_REGNUM;
12091
12092 record_buf[0] = reg_t;
12093 arm_insn_r->reg_rec_count = 1;
12094 }
12095 }
12096 else if (!bit_l && !bit_c)
12097 {
12098 /* Handle VMOV instruction. */
12099 if (bits_a == 0x00)
12100 {
12101 record_buf[0] = ARM_D0_REGNUM + reg_v;
12102
12103 arm_insn_r->reg_rec_count = 1;
12104 }
12105 /* Handle VMSR instruction. */
12106 else if (bits_a == 0x07)
12107 {
12108 record_buf[0] = ARM_FPSCR_REGNUM;
12109 arm_insn_r->reg_rec_count = 1;
12110 }
12111 }
12112 else if (!bit_l && bit_c)
12113 {
12114 /* Handle VMOV instruction. */
12115 if (!(bits_a & 0x04))
12116 {
12117 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12118 + ARM_D0_REGNUM;
12119 arm_insn_r->reg_rec_count = 1;
12120 }
12121 /* Handle VDUP instruction. */
12122 else
12123 {
12124 if (bit (arm_insn_r->arm_insn, 21))
12125 {
12126 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12127 record_buf[0] = reg_v + ARM_D0_REGNUM;
12128 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12129 arm_insn_r->reg_rec_count = 2;
12130 }
12131 else
12132 {
12133 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12134 record_buf[0] = reg_v + ARM_D0_REGNUM;
12135 arm_insn_r->reg_rec_count = 1;
12136 }
12137 }
12138 }
12139
12140 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12141 return 0;
12142 }
12143
12144 /* Record handler for extension register load/store instructions. */
12145
12146 static int
12147 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12148 {
12149 uint32_t opcode, single_reg;
12150 uint8_t op_vldm_vstm;
12151 uint32_t record_buf[8], record_buf_mem[128];
12152 ULONGEST u_regval = 0;
12153
12154 struct regcache *reg_cache = arm_insn_r->regcache;
12155
12156 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12157 single_reg = !bit (arm_insn_r->arm_insn, 8);
12158 op_vldm_vstm = opcode & 0x1b;
12159
12160 /* Handle VMOV instructions. */
12161 if ((opcode & 0x1e) == 0x04)
12162 {
12163 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
12164 {
12165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12166 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12167 arm_insn_r->reg_rec_count = 2;
12168 }
12169 else
12170 {
12171 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12172 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12173
12174 if (single_reg)
12175 {
12176 /* The first S register number m is REG_M:M (M is bit 5),
12177 the corresponding D register number is REG_M:M / 2, which
12178 is REG_M. */
12179 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12180 /* The second S register number is REG_M:M + 1, the
12181 corresponding D register number is (REG_M:M + 1) / 2.
12182 IOW, if bit M is 1, the first and second S registers
12183 are mapped to different D registers, otherwise, they are
12184 in the same D register. */
12185 if (bit_m)
12186 {
12187 record_buf[arm_insn_r->reg_rec_count++]
12188 = ARM_D0_REGNUM + reg_m + 1;
12189 }
12190 }
12191 else
12192 {
12193 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12194 arm_insn_r->reg_rec_count = 1;
12195 }
12196 }
12197 }
12198 /* Handle VSTM and VPUSH instructions. */
12199 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12200 || op_vldm_vstm == 0x12)
12201 {
12202 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12203 uint32_t memory_index = 0;
12204
12205 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12206 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12207 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12208 imm_off32 = imm_off8 << 2;
12209 memory_count = imm_off8;
12210
12211 if (bit (arm_insn_r->arm_insn, 23))
12212 start_address = u_regval;
12213 else
12214 start_address = u_regval - imm_off32;
12215
12216 if (bit (arm_insn_r->arm_insn, 21))
12217 {
12218 record_buf[0] = reg_rn;
12219 arm_insn_r->reg_rec_count = 1;
12220 }
12221
12222 while (memory_count > 0)
12223 {
12224 if (single_reg)
12225 {
12226 record_buf_mem[memory_index] = 4;
12227 record_buf_mem[memory_index + 1] = start_address;
12228 start_address = start_address + 4;
12229 memory_index = memory_index + 2;
12230 }
12231 else
12232 {
12233 record_buf_mem[memory_index] = 4;
12234 record_buf_mem[memory_index + 1] = start_address;
12235 record_buf_mem[memory_index + 2] = 4;
12236 record_buf_mem[memory_index + 3] = start_address + 4;
12237 start_address = start_address + 8;
12238 memory_index = memory_index + 4;
12239 }
12240 memory_count--;
12241 }
12242 arm_insn_r->mem_rec_count = (memory_index >> 1);
12243 }
12244 /* Handle VLDM instructions. */
12245 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12246 || op_vldm_vstm == 0x13)
12247 {
12248 uint32_t reg_count, reg_vd;
12249 uint32_t reg_index = 0;
12250 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12251
12252 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12253 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12254
12255 /* REG_VD is the first D register number. If the instruction
12256 loads memory to S registers (SINGLE_REG is TRUE), the register
12257 number is (REG_VD << 1 | bit D), so the corresponding D
12258 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12259 if (!single_reg)
12260 reg_vd = reg_vd | (bit_d << 4);
12261
12262 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12263 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12264
12265 /* If the instruction loads memory to D register, REG_COUNT should
12266 be divided by 2, according to the ARM Architecture Reference
12267 Manual. If the instruction loads memory to S register, divide by
12268 2 as well because two S registers are mapped to D register. */
12269 reg_count = reg_count / 2;
12270 if (single_reg && bit_d)
12271 {
12272 /* Increase the register count if S register list starts from
12273 an odd number (bit d is one). */
12274 reg_count++;
12275 }
12276
12277 while (reg_count > 0)
12278 {
12279 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12280 reg_count--;
12281 }
12282 arm_insn_r->reg_rec_count = reg_index;
12283 }
12284 /* VSTR Vector store register. */
12285 else if ((opcode & 0x13) == 0x10)
12286 {
12287 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12288 uint32_t memory_index = 0;
12289
12290 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12291 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12292 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12293 imm_off32 = imm_off8 << 2;
12294
12295 if (bit (arm_insn_r->arm_insn, 23))
12296 start_address = u_regval + imm_off32;
12297 else
12298 start_address = u_regval - imm_off32;
12299
12300 if (single_reg)
12301 {
12302 record_buf_mem[memory_index] = 4;
12303 record_buf_mem[memory_index + 1] = start_address;
12304 arm_insn_r->mem_rec_count = 1;
12305 }
12306 else
12307 {
12308 record_buf_mem[memory_index] = 4;
12309 record_buf_mem[memory_index + 1] = start_address;
12310 record_buf_mem[memory_index + 2] = 4;
12311 record_buf_mem[memory_index + 3] = start_address + 4;
12312 arm_insn_r->mem_rec_count = 2;
12313 }
12314 }
12315 /* VLDR Vector load register. */
12316 else if ((opcode & 0x13) == 0x11)
12317 {
12318 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12319
12320 if (!single_reg)
12321 {
12322 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12323 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12324 }
12325 else
12326 {
12327 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12328 /* Record register D rather than pseudo register S. */
12329 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12330 }
12331 arm_insn_r->reg_rec_count = 1;
12332 }
12333
12334 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12335 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12336 return 0;
12337 }
12338
12339 /* Record handler for arm/thumb mode VFP data processing instructions. */
12340
12341 static int
12342 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12343 {
12344 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12345 uint32_t record_buf[4];
12346 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12347 enum insn_types curr_insn_type = INSN_INV;
12348
12349 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12350 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12351 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12352 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12353 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12354 bit_d = bit (arm_insn_r->arm_insn, 22);
12355 /* Mask off the "D" bit. */
12356 opc1 = opc1 & ~0x04;
12357
12358 /* Handle VMLA, VMLS. */
12359 if (opc1 == 0x00)
12360 {
12361 if (bit (arm_insn_r->arm_insn, 10))
12362 {
12363 if (bit (arm_insn_r->arm_insn, 6))
12364 curr_insn_type = INSN_T0;
12365 else
12366 curr_insn_type = INSN_T1;
12367 }
12368 else
12369 {
12370 if (dp_op_sz)
12371 curr_insn_type = INSN_T1;
12372 else
12373 curr_insn_type = INSN_T2;
12374 }
12375 }
12376 /* Handle VNMLA, VNMLS, VNMUL. */
12377 else if (opc1 == 0x01)
12378 {
12379 if (dp_op_sz)
12380 curr_insn_type = INSN_T1;
12381 else
12382 curr_insn_type = INSN_T2;
12383 }
12384 /* Handle VMUL. */
12385 else if (opc1 == 0x02 && !(opc3 & 0x01))
12386 {
12387 if (bit (arm_insn_r->arm_insn, 10))
12388 {
12389 if (bit (arm_insn_r->arm_insn, 6))
12390 curr_insn_type = INSN_T0;
12391 else
12392 curr_insn_type = INSN_T1;
12393 }
12394 else
12395 {
12396 if (dp_op_sz)
12397 curr_insn_type = INSN_T1;
12398 else
12399 curr_insn_type = INSN_T2;
12400 }
12401 }
12402 /* Handle VADD, VSUB. */
12403 else if (opc1 == 0x03)
12404 {
12405 if (!bit (arm_insn_r->arm_insn, 9))
12406 {
12407 if (bit (arm_insn_r->arm_insn, 6))
12408 curr_insn_type = INSN_T0;
12409 else
12410 curr_insn_type = INSN_T1;
12411 }
12412 else
12413 {
12414 if (dp_op_sz)
12415 curr_insn_type = INSN_T1;
12416 else
12417 curr_insn_type = INSN_T2;
12418 }
12419 }
12420 /* Handle VDIV. */
12421 else if (opc1 == 0x08)
12422 {
12423 if (dp_op_sz)
12424 curr_insn_type = INSN_T1;
12425 else
12426 curr_insn_type = INSN_T2;
12427 }
12428 /* Handle all other vfp data processing instructions. */
12429 else if (opc1 == 0x0b)
12430 {
12431 /* Handle VMOV. */
12432 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12433 {
12434 if (bit (arm_insn_r->arm_insn, 4))
12435 {
12436 if (bit (arm_insn_r->arm_insn, 6))
12437 curr_insn_type = INSN_T0;
12438 else
12439 curr_insn_type = INSN_T1;
12440 }
12441 else
12442 {
12443 if (dp_op_sz)
12444 curr_insn_type = INSN_T1;
12445 else
12446 curr_insn_type = INSN_T2;
12447 }
12448 }
12449 /* Handle VNEG and VABS. */
12450 else if ((opc2 == 0x01 && opc3 == 0x01)
12451 || (opc2 == 0x00 && opc3 == 0x03))
12452 {
12453 if (!bit (arm_insn_r->arm_insn, 11))
12454 {
12455 if (bit (arm_insn_r->arm_insn, 6))
12456 curr_insn_type = INSN_T0;
12457 else
12458 curr_insn_type = INSN_T1;
12459 }
12460 else
12461 {
12462 if (dp_op_sz)
12463 curr_insn_type = INSN_T1;
12464 else
12465 curr_insn_type = INSN_T2;
12466 }
12467 }
12468 /* Handle VSQRT. */
12469 else if (opc2 == 0x01 && opc3 == 0x03)
12470 {
12471 if (dp_op_sz)
12472 curr_insn_type = INSN_T1;
12473 else
12474 curr_insn_type = INSN_T2;
12475 }
12476 /* Handle VCVT. */
12477 else if (opc2 == 0x07 && opc3 == 0x03)
12478 {
12479 if (!dp_op_sz)
12480 curr_insn_type = INSN_T1;
12481 else
12482 curr_insn_type = INSN_T2;
12483 }
12484 else if (opc3 & 0x01)
12485 {
12486 /* Handle VCVT. */
12487 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12488 {
12489 if (!bit (arm_insn_r->arm_insn, 18))
12490 curr_insn_type = INSN_T2;
12491 else
12492 {
12493 if (dp_op_sz)
12494 curr_insn_type = INSN_T1;
12495 else
12496 curr_insn_type = INSN_T2;
12497 }
12498 }
12499 /* Handle VCVT. */
12500 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12501 {
12502 if (dp_op_sz)
12503 curr_insn_type = INSN_T1;
12504 else
12505 curr_insn_type = INSN_T2;
12506 }
12507 /* Handle VCVTB, VCVTT. */
12508 else if ((opc2 & 0x0e) == 0x02)
12509 curr_insn_type = INSN_T2;
12510 /* Handle VCMP, VCMPE. */
12511 else if ((opc2 & 0x0e) == 0x04)
12512 curr_insn_type = INSN_T3;
12513 }
12514 }
12515
12516 switch (curr_insn_type)
12517 {
12518 case INSN_T0:
12519 reg_vd = reg_vd | (bit_d << 4);
12520 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12521 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12522 arm_insn_r->reg_rec_count = 2;
12523 break;
12524
12525 case INSN_T1:
12526 reg_vd = reg_vd | (bit_d << 4);
12527 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12528 arm_insn_r->reg_rec_count = 1;
12529 break;
12530
12531 case INSN_T2:
12532 reg_vd = (reg_vd << 1) | bit_d;
12533 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12534 arm_insn_r->reg_rec_count = 1;
12535 break;
12536
12537 case INSN_T3:
12538 record_buf[0] = ARM_FPSCR_REGNUM;
12539 arm_insn_r->reg_rec_count = 1;
12540 break;
12541
12542 default:
12543 gdb_assert_not_reached ("no decoding pattern found");
12544 break;
12545 }
12546
12547 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12548 return 0;
12549 }
12550
12551 /* Handling opcode 110 insns. */
12552
12553 static int
12554 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12555 {
12556 uint32_t op1, op1_ebit, coproc;
12557
12558 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12559 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12560 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12561
12562 if ((coproc & 0x0e) == 0x0a)
12563 {
12564 /* Handle extension register ld/st instructions. */
12565 if (!(op1 & 0x20))
12566 return arm_record_exreg_ld_st_insn (arm_insn_r);
12567
12568 /* 64-bit transfers between arm core and extension registers. */
12569 if ((op1 & 0x3e) == 0x04)
12570 return arm_record_exreg_ld_st_insn (arm_insn_r);
12571 }
12572 else
12573 {
12574 /* Handle coprocessor ld/st instructions. */
12575 if (!(op1 & 0x3a))
12576 {
12577 /* Store. */
12578 if (!op1_ebit)
12579 return arm_record_unsupported_insn (arm_insn_r);
12580 else
12581 /* Load. */
12582 return arm_record_unsupported_insn (arm_insn_r);
12583 }
12584
12585 /* Move to coprocessor from two arm core registers. */
12586 if (op1 == 0x4)
12587 return arm_record_unsupported_insn (arm_insn_r);
12588
12589 /* Move to two arm core registers from coprocessor. */
12590 if (op1 == 0x5)
12591 {
12592 uint32_t reg_t[2];
12593
12594 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12595 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12596 arm_insn_r->reg_rec_count = 2;
12597
12598 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12599 return 0;
12600 }
12601 }
12602 return arm_record_unsupported_insn (arm_insn_r);
12603 }
12604
12605 /* Handling opcode 111 insns. */
12606
12607 static int
12608 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12609 {
12610 uint32_t op, op1_ebit, coproc, bits_24_25;
12611 arm_gdbarch_tdep *tdep
12612 = (arm_gdbarch_tdep *) gdbarch_tdep (arm_insn_r->gdbarch);
12613 struct regcache *reg_cache = arm_insn_r->regcache;
12614
12615 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12616 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12617 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12618 op = bit (arm_insn_r->arm_insn, 4);
12619 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
12620
12621 /* Handle arm SWI/SVC system call instructions. */
12622 if (bits_24_25 == 0x3)
12623 {
12624 if (tdep->arm_syscall_record != NULL)
12625 {
12626 ULONGEST svc_operand, svc_number;
12627
12628 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12629
12630 if (svc_operand) /* OABI. */
12631 svc_number = svc_operand - 0x900000;
12632 else /* EABI. */
12633 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12634
12635 return tdep->arm_syscall_record (reg_cache, svc_number);
12636 }
12637 else
12638 {
12639 gdb_printf (gdb_stderr, _("no syscall record support\n"));
12640 return -1;
12641 }
12642 }
12643 else if (bits_24_25 == 0x02)
12644 {
12645 if (op)
12646 {
12647 if ((coproc & 0x0e) == 0x0a)
12648 {
12649 /* 8, 16, and 32-bit transfer */
12650 return arm_record_vdata_transfer_insn (arm_insn_r);
12651 }
12652 else
12653 {
12654 if (op1_ebit)
12655 {
12656 /* MRC, MRC2 */
12657 uint32_t record_buf[1];
12658
12659 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12660 if (record_buf[0] == 15)
12661 record_buf[0] = ARM_PS_REGNUM;
12662
12663 arm_insn_r->reg_rec_count = 1;
12664 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12665 record_buf);
12666 return 0;
12667 }
12668 else
12669 {
12670 /* MCR, MCR2 */
12671 return -1;
12672 }
12673 }
12674 }
12675 else
12676 {
12677 if ((coproc & 0x0e) == 0x0a)
12678 {
12679 /* VFP data-processing instructions. */
12680 return arm_record_vfp_data_proc_insn (arm_insn_r);
12681 }
12682 else
12683 {
12684 /* CDP, CDP2 */
12685 return -1;
12686 }
12687 }
12688 }
12689 else
12690 {
12691 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
12692
12693 if (op1 == 5)
12694 {
12695 if ((coproc & 0x0e) != 0x0a)
12696 {
12697 /* MRRC, MRRC2 */
12698 return -1;
12699 }
12700 }
12701 else if (op1 == 4 || op1 == 5)
12702 {
12703 if ((coproc & 0x0e) == 0x0a)
12704 {
12705 /* 64-bit transfers between ARM core and extension */
12706 return -1;
12707 }
12708 else if (op1 == 4)
12709 {
12710 /* MCRR, MCRR2 */
12711 return -1;
12712 }
12713 }
12714 else if (op1 == 0 || op1 == 1)
12715 {
12716 /* UNDEFINED */
12717 return -1;
12718 }
12719 else
12720 {
12721 if ((coproc & 0x0e) == 0x0a)
12722 {
12723 /* Extension register load/store */
12724 }
12725 else
12726 {
12727 /* STC, STC2, LDC, LDC2 */
12728 }
12729 return -1;
12730 }
12731 }
12732
12733 return -1;
12734 }
12735
12736 /* Handling opcode 000 insns. */
12737
12738 static int
12739 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12740 {
12741 uint32_t record_buf[8];
12742 uint32_t reg_src1 = 0;
12743
12744 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12745
12746 record_buf[0] = ARM_PS_REGNUM;
12747 record_buf[1] = reg_src1;
12748 thumb_insn_r->reg_rec_count = 2;
12749
12750 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12751
12752 return 0;
12753 }
12754
12755
12756 /* Handling opcode 001 insns. */
12757
12758 static int
12759 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12760 {
12761 uint32_t record_buf[8];
12762 uint32_t reg_src1 = 0;
12763
12764 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12765
12766 record_buf[0] = ARM_PS_REGNUM;
12767 record_buf[1] = reg_src1;
12768 thumb_insn_r->reg_rec_count = 2;
12769
12770 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12771
12772 return 0;
12773 }
12774
12775 /* Handling opcode 010 insns. */
12776
12777 static int
12778 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12779 {
12780 struct regcache *reg_cache = thumb_insn_r->regcache;
12781 uint32_t record_buf[8], record_buf_mem[8];
12782
12783 uint32_t reg_src1 = 0, reg_src2 = 0;
12784 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12785
12786 ULONGEST u_regval[2] = {0};
12787
12788 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12789
12790 if (bit (thumb_insn_r->arm_insn, 12))
12791 {
12792 /* Handle load/store register offset. */
12793 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
12794
12795 if (in_inclusive_range (opB, 4U, 7U))
12796 {
12797 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12798 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12799 record_buf[0] = reg_src1;
12800 thumb_insn_r->reg_rec_count = 1;
12801 }
12802 else if (in_inclusive_range (opB, 0U, 2U))
12803 {
12804 /* STR(2), STRB(2), STRH(2) . */
12805 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12806 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12807 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12808 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12809 if (0 == opB)
12810 record_buf_mem[0] = 4; /* STR (2). */
12811 else if (2 == opB)
12812 record_buf_mem[0] = 1; /* STRB (2). */
12813 else if (1 == opB)
12814 record_buf_mem[0] = 2; /* STRH (2). */
12815 record_buf_mem[1] = u_regval[0] + u_regval[1];
12816 thumb_insn_r->mem_rec_count = 1;
12817 }
12818 }
12819 else if (bit (thumb_insn_r->arm_insn, 11))
12820 {
12821 /* Handle load from literal pool. */
12822 /* LDR(3). */
12823 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12824 record_buf[0] = reg_src1;
12825 thumb_insn_r->reg_rec_count = 1;
12826 }
12827 else if (opcode1)
12828 {
12829 /* Special data instructions and branch and exchange */
12830 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12831 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12832 if ((3 == opcode2) && (!opcode3))
12833 {
12834 /* Branch with exchange. */
12835 record_buf[0] = ARM_PS_REGNUM;
12836 thumb_insn_r->reg_rec_count = 1;
12837 }
12838 else
12839 {
12840 /* Format 8; special data processing insns. */
12841 record_buf[0] = ARM_PS_REGNUM;
12842 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
12843 | bits (thumb_insn_r->arm_insn, 0, 2));
12844 thumb_insn_r->reg_rec_count = 2;
12845 }
12846 }
12847 else
12848 {
12849 /* Format 5; data processing insns. */
12850 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12851 if (bit (thumb_insn_r->arm_insn, 7))
12852 {
12853 reg_src1 = reg_src1 + 8;
12854 }
12855 record_buf[0] = ARM_PS_REGNUM;
12856 record_buf[1] = reg_src1;
12857 thumb_insn_r->reg_rec_count = 2;
12858 }
12859
12860 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12861 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12862 record_buf_mem);
12863
12864 return 0;
12865 }
12866
12867 /* Handling opcode 001 insns. */
12868
12869 static int
12870 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12871 {
12872 struct regcache *reg_cache = thumb_insn_r->regcache;
12873 uint32_t record_buf[8], record_buf_mem[8];
12874
12875 uint32_t reg_src1 = 0;
12876 uint32_t opcode = 0, immed_5 = 0;
12877
12878 ULONGEST u_regval = 0;
12879
12880 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12881
12882 if (opcode)
12883 {
12884 /* LDR(1). */
12885 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12886 record_buf[0] = reg_src1;
12887 thumb_insn_r->reg_rec_count = 1;
12888 }
12889 else
12890 {
12891 /* STR(1). */
12892 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12893 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12894 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12895 record_buf_mem[0] = 4;
12896 record_buf_mem[1] = u_regval + (immed_5 * 4);
12897 thumb_insn_r->mem_rec_count = 1;
12898 }
12899
12900 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12901 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12902 record_buf_mem);
12903
12904 return 0;
12905 }
12906
12907 /* Handling opcode 100 insns. */
12908
12909 static int
12910 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12911 {
12912 struct regcache *reg_cache = thumb_insn_r->regcache;
12913 uint32_t record_buf[8], record_buf_mem[8];
12914
12915 uint32_t reg_src1 = 0;
12916 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12917
12918 ULONGEST u_regval = 0;
12919
12920 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12921
12922 if (3 == opcode)
12923 {
12924 /* LDR(4). */
12925 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12926 record_buf[0] = reg_src1;
12927 thumb_insn_r->reg_rec_count = 1;
12928 }
12929 else if (1 == opcode)
12930 {
12931 /* LDRH(1). */
12932 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12933 record_buf[0] = reg_src1;
12934 thumb_insn_r->reg_rec_count = 1;
12935 }
12936 else if (2 == opcode)
12937 {
12938 /* STR(3). */
12939 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12940 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12941 record_buf_mem[0] = 4;
12942 record_buf_mem[1] = u_regval + (immed_8 * 4);
12943 thumb_insn_r->mem_rec_count = 1;
12944 }
12945 else if (0 == opcode)
12946 {
12947 /* STRH(1). */
12948 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12949 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12950 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12951 record_buf_mem[0] = 2;
12952 record_buf_mem[1] = u_regval + (immed_5 * 2);
12953 thumb_insn_r->mem_rec_count = 1;
12954 }
12955
12956 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12957 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12958 record_buf_mem);
12959
12960 return 0;
12961 }
12962
12963 /* Handling opcode 101 insns. */
12964
12965 static int
12966 thumb_record_misc (insn_decode_record *thumb_insn_r)
12967 {
12968 struct regcache *reg_cache = thumb_insn_r->regcache;
12969
12970 uint32_t opcode = 0;
12971 uint32_t register_bits = 0, register_count = 0;
12972 uint32_t index = 0, start_address = 0;
12973 uint32_t record_buf[24], record_buf_mem[48];
12974 uint32_t reg_src1;
12975
12976 ULONGEST u_regval = 0;
12977
12978 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12979
12980 if (opcode == 0 || opcode == 1)
12981 {
12982 /* ADR and ADD (SP plus immediate) */
12983
12984 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12985 record_buf[0] = reg_src1;
12986 thumb_insn_r->reg_rec_count = 1;
12987 }
12988 else
12989 {
12990 /* Miscellaneous 16-bit instructions */
12991 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12992
12993 switch (opcode2)
12994 {
12995 case 6:
12996 /* SETEND and CPS */
12997 break;
12998 case 0:
12999 /* ADD/SUB (SP plus immediate) */
13000 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13001 record_buf[0] = ARM_SP_REGNUM;
13002 thumb_insn_r->reg_rec_count = 1;
13003 break;
13004 case 1: /* fall through */
13005 case 3: /* fall through */
13006 case 9: /* fall through */
13007 case 11:
13008 /* CBNZ, CBZ */
13009 break;
13010 case 2:
13011 /* SXTH, SXTB, UXTH, UXTB */
13012 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13013 thumb_insn_r->reg_rec_count = 1;
13014 break;
13015 case 4: /* fall through */
13016 case 5:
13017 /* PUSH. */
13018 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13019 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13020 while (register_bits)
13021 {
13022 if (register_bits & 0x00000001)
13023 register_count++;
13024 register_bits = register_bits >> 1;
13025 }
13026 start_address = u_regval - \
13027 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
13028 thumb_insn_r->mem_rec_count = register_count;
13029 while (register_count)
13030 {
13031 record_buf_mem[(register_count * 2) - 1] = start_address;
13032 record_buf_mem[(register_count * 2) - 2] = 4;
13033 start_address = start_address + 4;
13034 register_count--;
13035 }
13036 record_buf[0] = ARM_SP_REGNUM;
13037 thumb_insn_r->reg_rec_count = 1;
13038 break;
13039 case 10:
13040 /* REV, REV16, REVSH */
13041 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13042 thumb_insn_r->reg_rec_count = 1;
13043 break;
13044 case 12: /* fall through */
13045 case 13:
13046 /* POP. */
13047 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13048 while (register_bits)
13049 {
13050 if (register_bits & 0x00000001)
13051 record_buf[index++] = register_count;
13052 register_bits = register_bits >> 1;
13053 register_count++;
13054 }
13055 record_buf[index++] = ARM_PS_REGNUM;
13056 record_buf[index++] = ARM_SP_REGNUM;
13057 thumb_insn_r->reg_rec_count = index;
13058 break;
13059 case 0xe:
13060 /* BKPT insn. */
13061 /* Handle enhanced software breakpoint insn, BKPT. */
13062 /* CPSR is changed to be executed in ARM state, disabling normal
13063 interrupts, entering abort mode. */
13064 /* According to high vector configuration PC is set. */
13065 /* User hits breakpoint and type reverse, in that case, we need to go back with
13066 previous CPSR and Program Counter. */
13067 record_buf[0] = ARM_PS_REGNUM;
13068 record_buf[1] = ARM_LR_REGNUM;
13069 thumb_insn_r->reg_rec_count = 2;
13070 /* We need to save SPSR value, which is not yet done. */
13071 gdb_printf (gdb_stderr,
13072 _("Process record does not support instruction "
13073 "0x%0x at address %s.\n"),
13074 thumb_insn_r->arm_insn,
13075 paddress (thumb_insn_r->gdbarch,
13076 thumb_insn_r->this_addr));
13077 return -1;
13078
13079 case 0xf:
13080 /* If-Then, and hints */
13081 break;
13082 default:
13083 return -1;
13084 };
13085 }
13086
13087 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13088 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13089 record_buf_mem);
13090
13091 return 0;
13092 }
13093
13094 /* Handling opcode 110 insns. */
13095
13096 static int
13097 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
13098 {
13099 arm_gdbarch_tdep *tdep
13100 = (arm_gdbarch_tdep *) gdbarch_tdep (thumb_insn_r->gdbarch);
13101 struct regcache *reg_cache = thumb_insn_r->regcache;
13102
13103 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
13104 uint32_t reg_src1 = 0;
13105 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13106 uint32_t index = 0, start_address = 0;
13107 uint32_t record_buf[24], record_buf_mem[48];
13108
13109 ULONGEST u_regval = 0;
13110
13111 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13112 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13113
13114 if (1 == opcode2)
13115 {
13116
13117 /* LDMIA. */
13118 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13119 /* Get Rn. */
13120 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13121 while (register_bits)
13122 {
13123 if (register_bits & 0x00000001)
13124 record_buf[index++] = register_count;
13125 register_bits = register_bits >> 1;
13126 register_count++;
13127 }
13128 record_buf[index++] = reg_src1;
13129 thumb_insn_r->reg_rec_count = index;
13130 }
13131 else if (0 == opcode2)
13132 {
13133 /* It handles both STMIA. */
13134 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13135 /* Get Rn. */
13136 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13137 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13138 while (register_bits)
13139 {
13140 if (register_bits & 0x00000001)
13141 register_count++;
13142 register_bits = register_bits >> 1;
13143 }
13144 start_address = u_regval;
13145 thumb_insn_r->mem_rec_count = register_count;
13146 while (register_count)
13147 {
13148 record_buf_mem[(register_count * 2) - 1] = start_address;
13149 record_buf_mem[(register_count * 2) - 2] = 4;
13150 start_address = start_address + 4;
13151 register_count--;
13152 }
13153 }
13154 else if (0x1F == opcode1)
13155 {
13156 /* Handle arm syscall insn. */
13157 if (tdep->arm_syscall_record != NULL)
13158 {
13159 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13160 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13161 }
13162 else
13163 {
13164 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13165 return -1;
13166 }
13167 }
13168
13169 /* B (1), conditional branch is automatically taken care in process_record,
13170 as PC is saved there. */
13171
13172 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13173 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13174 record_buf_mem);
13175
13176 return ret;
13177 }
13178
13179 /* Handling opcode 111 insns. */
13180
13181 static int
13182 thumb_record_branch (insn_decode_record *thumb_insn_r)
13183 {
13184 uint32_t record_buf[8];
13185 uint32_t bits_h = 0;
13186
13187 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13188
13189 if (2 == bits_h || 3 == bits_h)
13190 {
13191 /* BL */
13192 record_buf[0] = ARM_LR_REGNUM;
13193 thumb_insn_r->reg_rec_count = 1;
13194 }
13195 else if (1 == bits_h)
13196 {
13197 /* BLX(1). */
13198 record_buf[0] = ARM_PS_REGNUM;
13199 record_buf[1] = ARM_LR_REGNUM;
13200 thumb_insn_r->reg_rec_count = 2;
13201 }
13202
13203 /* B(2) is automatically taken care in process_record, as PC is
13204 saved there. */
13205
13206 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13207
13208 return 0;
13209 }
13210
13211 /* Handler for thumb2 load/store multiple instructions. */
13212
13213 static int
13214 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
13215 {
13216 struct regcache *reg_cache = thumb2_insn_r->regcache;
13217
13218 uint32_t reg_rn, op;
13219 uint32_t register_bits = 0, register_count = 0;
13220 uint32_t index = 0, start_address = 0;
13221 uint32_t record_buf[24], record_buf_mem[48];
13222
13223 ULONGEST u_regval = 0;
13224
13225 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13226 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13227
13228 if (0 == op || 3 == op)
13229 {
13230 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13231 {
13232 /* Handle RFE instruction. */
13233 record_buf[0] = ARM_PS_REGNUM;
13234 thumb2_insn_r->reg_rec_count = 1;
13235 }
13236 else
13237 {
13238 /* Handle SRS instruction after reading banked SP. */
13239 return arm_record_unsupported_insn (thumb2_insn_r);
13240 }
13241 }
13242 else if (1 == op || 2 == op)
13243 {
13244 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13245 {
13246 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13247 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13248 while (register_bits)
13249 {
13250 if (register_bits & 0x00000001)
13251 record_buf[index++] = register_count;
13252
13253 register_count++;
13254 register_bits = register_bits >> 1;
13255 }
13256 record_buf[index++] = reg_rn;
13257 record_buf[index++] = ARM_PS_REGNUM;
13258 thumb2_insn_r->reg_rec_count = index;
13259 }
13260 else
13261 {
13262 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13263 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13264 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13265 while (register_bits)
13266 {
13267 if (register_bits & 0x00000001)
13268 register_count++;
13269
13270 register_bits = register_bits >> 1;
13271 }
13272
13273 if (1 == op)
13274 {
13275 /* Start address calculation for LDMDB/LDMEA. */
13276 start_address = u_regval;
13277 }
13278 else if (2 == op)
13279 {
13280 /* Start address calculation for LDMDB/LDMEA. */
13281 start_address = u_regval - register_count * 4;
13282 }
13283
13284 thumb2_insn_r->mem_rec_count = register_count;
13285 while (register_count)
13286 {
13287 record_buf_mem[register_count * 2 - 1] = start_address;
13288 record_buf_mem[register_count * 2 - 2] = 4;
13289 start_address = start_address + 4;
13290 register_count--;
13291 }
13292 record_buf[0] = reg_rn;
13293 record_buf[1] = ARM_PS_REGNUM;
13294 thumb2_insn_r->reg_rec_count = 2;
13295 }
13296 }
13297
13298 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13299 record_buf_mem);
13300 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13301 record_buf);
13302 return ARM_RECORD_SUCCESS;
13303 }
13304
13305 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13306 instructions. */
13307
13308 static int
13309 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13310 {
13311 struct regcache *reg_cache = thumb2_insn_r->regcache;
13312
13313 uint32_t reg_rd, reg_rn, offset_imm;
13314 uint32_t reg_dest1, reg_dest2;
13315 uint32_t address, offset_addr;
13316 uint32_t record_buf[8], record_buf_mem[8];
13317 uint32_t op1, op2, op3;
13318
13319 ULONGEST u_regval[2];
13320
13321 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13322 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13323 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13324
13325 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13326 {
13327 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13328 {
13329 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13330 record_buf[0] = reg_dest1;
13331 record_buf[1] = ARM_PS_REGNUM;
13332 thumb2_insn_r->reg_rec_count = 2;
13333 }
13334
13335 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13336 {
13337 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13338 record_buf[2] = reg_dest2;
13339 thumb2_insn_r->reg_rec_count = 3;
13340 }
13341 }
13342 else
13343 {
13344 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13345 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13346
13347 if (0 == op1 && 0 == op2)
13348 {
13349 /* Handle STREX. */
13350 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13351 address = u_regval[0] + (offset_imm * 4);
13352 record_buf_mem[0] = 4;
13353 record_buf_mem[1] = address;
13354 thumb2_insn_r->mem_rec_count = 1;
13355 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13356 record_buf[0] = reg_rd;
13357 thumb2_insn_r->reg_rec_count = 1;
13358 }
13359 else if (1 == op1 && 0 == op2)
13360 {
13361 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13362 record_buf[0] = reg_rd;
13363 thumb2_insn_r->reg_rec_count = 1;
13364 address = u_regval[0];
13365 record_buf_mem[1] = address;
13366
13367 if (4 == op3)
13368 {
13369 /* Handle STREXB. */
13370 record_buf_mem[0] = 1;
13371 thumb2_insn_r->mem_rec_count = 1;
13372 }
13373 else if (5 == op3)
13374 {
13375 /* Handle STREXH. */
13376 record_buf_mem[0] = 2 ;
13377 thumb2_insn_r->mem_rec_count = 1;
13378 }
13379 else if (7 == op3)
13380 {
13381 /* Handle STREXD. */
13382 address = u_regval[0];
13383 record_buf_mem[0] = 4;
13384 record_buf_mem[2] = 4;
13385 record_buf_mem[3] = address + 4;
13386 thumb2_insn_r->mem_rec_count = 2;
13387 }
13388 }
13389 else
13390 {
13391 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13392
13393 if (bit (thumb2_insn_r->arm_insn, 24))
13394 {
13395 if (bit (thumb2_insn_r->arm_insn, 23))
13396 offset_addr = u_regval[0] + (offset_imm * 4);
13397 else
13398 offset_addr = u_regval[0] - (offset_imm * 4);
13399
13400 address = offset_addr;
13401 }
13402 else
13403 address = u_regval[0];
13404
13405 record_buf_mem[0] = 4;
13406 record_buf_mem[1] = address;
13407 record_buf_mem[2] = 4;
13408 record_buf_mem[3] = address + 4;
13409 thumb2_insn_r->mem_rec_count = 2;
13410 record_buf[0] = reg_rn;
13411 thumb2_insn_r->reg_rec_count = 1;
13412 }
13413 }
13414
13415 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13416 record_buf);
13417 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13418 record_buf_mem);
13419 return ARM_RECORD_SUCCESS;
13420 }
13421
13422 /* Handler for thumb2 data processing (shift register and modified immediate)
13423 instructions. */
13424
13425 static int
13426 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13427 {
13428 uint32_t reg_rd, op;
13429 uint32_t record_buf[8];
13430
13431 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13432 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13433
13434 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13435 {
13436 record_buf[0] = ARM_PS_REGNUM;
13437 thumb2_insn_r->reg_rec_count = 1;
13438 }
13439 else
13440 {
13441 record_buf[0] = reg_rd;
13442 record_buf[1] = ARM_PS_REGNUM;
13443 thumb2_insn_r->reg_rec_count = 2;
13444 }
13445
13446 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13447 record_buf);
13448 return ARM_RECORD_SUCCESS;
13449 }
13450
13451 /* Generic handler for thumb2 instructions which effect destination and PS
13452 registers. */
13453
13454 static int
13455 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13456 {
13457 uint32_t reg_rd;
13458 uint32_t record_buf[8];
13459
13460 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13461
13462 record_buf[0] = reg_rd;
13463 record_buf[1] = ARM_PS_REGNUM;
13464 thumb2_insn_r->reg_rec_count = 2;
13465
13466 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13467 record_buf);
13468 return ARM_RECORD_SUCCESS;
13469 }
13470
13471 /* Handler for thumb2 branch and miscellaneous control instructions. */
13472
13473 static int
13474 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13475 {
13476 uint32_t op, op1, op2;
13477 uint32_t record_buf[8];
13478
13479 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13480 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13481 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13482
13483 /* Handle MSR insn. */
13484 if (!(op1 & 0x2) && 0x38 == op)
13485 {
13486 if (!(op2 & 0x3))
13487 {
13488 /* CPSR is going to be changed. */
13489 record_buf[0] = ARM_PS_REGNUM;
13490 thumb2_insn_r->reg_rec_count = 1;
13491 }
13492 else
13493 {
13494 arm_record_unsupported_insn(thumb2_insn_r);
13495 return -1;
13496 }
13497 }
13498 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13499 {
13500 /* BLX. */
13501 record_buf[0] = ARM_PS_REGNUM;
13502 record_buf[1] = ARM_LR_REGNUM;
13503 thumb2_insn_r->reg_rec_count = 2;
13504 }
13505
13506 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13507 record_buf);
13508 return ARM_RECORD_SUCCESS;
13509 }
13510
13511 /* Handler for thumb2 store single data item instructions. */
13512
13513 static int
13514 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13515 {
13516 struct regcache *reg_cache = thumb2_insn_r->regcache;
13517
13518 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13519 uint32_t address, offset_addr;
13520 uint32_t record_buf[8], record_buf_mem[8];
13521 uint32_t op1, op2;
13522
13523 ULONGEST u_regval[2];
13524
13525 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13526 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13527 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13528 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13529
13530 if (bit (thumb2_insn_r->arm_insn, 23))
13531 {
13532 /* T2 encoding. */
13533 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13534 offset_addr = u_regval[0] + offset_imm;
13535 address = offset_addr;
13536 }
13537 else
13538 {
13539 /* T3 encoding. */
13540 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13541 {
13542 /* Handle STRB (register). */
13543 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13544 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13545 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13546 offset_addr = u_regval[1] << shift_imm;
13547 address = u_regval[0] + offset_addr;
13548 }
13549 else
13550 {
13551 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13552 if (bit (thumb2_insn_r->arm_insn, 10))
13553 {
13554 if (bit (thumb2_insn_r->arm_insn, 9))
13555 offset_addr = u_regval[0] + offset_imm;
13556 else
13557 offset_addr = u_regval[0] - offset_imm;
13558
13559 address = offset_addr;
13560 }
13561 else
13562 address = u_regval[0];
13563 }
13564 }
13565
13566 switch (op1)
13567 {
13568 /* Store byte instructions. */
13569 case 4:
13570 case 0:
13571 record_buf_mem[0] = 1;
13572 break;
13573 /* Store half word instructions. */
13574 case 1:
13575 case 5:
13576 record_buf_mem[0] = 2;
13577 break;
13578 /* Store word instructions. */
13579 case 2:
13580 case 6:
13581 record_buf_mem[0] = 4;
13582 break;
13583
13584 default:
13585 gdb_assert_not_reached ("no decoding pattern found");
13586 break;
13587 }
13588
13589 record_buf_mem[1] = address;
13590 thumb2_insn_r->mem_rec_count = 1;
13591 record_buf[0] = reg_rn;
13592 thumb2_insn_r->reg_rec_count = 1;
13593
13594 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13595 record_buf);
13596 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13597 record_buf_mem);
13598 return ARM_RECORD_SUCCESS;
13599 }
13600
13601 /* Handler for thumb2 load memory hints instructions. */
13602
13603 static int
13604 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13605 {
13606 uint32_t record_buf[8];
13607 uint32_t reg_rt, reg_rn;
13608
13609 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13610 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13611
13612 if (ARM_PC_REGNUM != reg_rt)
13613 {
13614 record_buf[0] = reg_rt;
13615 record_buf[1] = reg_rn;
13616 record_buf[2] = ARM_PS_REGNUM;
13617 thumb2_insn_r->reg_rec_count = 3;
13618
13619 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13620 record_buf);
13621 return ARM_RECORD_SUCCESS;
13622 }
13623
13624 return ARM_RECORD_FAILURE;
13625 }
13626
13627 /* Handler for thumb2 load word instructions. */
13628
13629 static int
13630 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13631 {
13632 uint32_t record_buf[8];
13633
13634 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13635 record_buf[1] = ARM_PS_REGNUM;
13636 thumb2_insn_r->reg_rec_count = 2;
13637
13638 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13639 record_buf);
13640 return ARM_RECORD_SUCCESS;
13641 }
13642
13643 /* Handler for thumb2 long multiply, long multiply accumulate, and
13644 divide instructions. */
13645
13646 static int
13647 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13648 {
13649 uint32_t opcode1 = 0, opcode2 = 0;
13650 uint32_t record_buf[8];
13651
13652 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13653 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13654
13655 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13656 {
13657 /* Handle SMULL, UMULL, SMULAL. */
13658 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13659 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13660 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13661 record_buf[2] = ARM_PS_REGNUM;
13662 thumb2_insn_r->reg_rec_count = 3;
13663 }
13664 else if (1 == opcode1 || 3 == opcode2)
13665 {
13666 /* Handle SDIV and UDIV. */
13667 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13668 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13669 record_buf[2] = ARM_PS_REGNUM;
13670 thumb2_insn_r->reg_rec_count = 3;
13671 }
13672 else
13673 return ARM_RECORD_FAILURE;
13674
13675 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13676 record_buf);
13677 return ARM_RECORD_SUCCESS;
13678 }
13679
13680 /* Record handler for thumb32 coprocessor instructions. */
13681
13682 static int
13683 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13684 {
13685 if (bit (thumb2_insn_r->arm_insn, 25))
13686 return arm_record_coproc_data_proc (thumb2_insn_r);
13687 else
13688 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13689 }
13690
13691 /* Record handler for advance SIMD structure load/store instructions. */
13692
13693 static int
13694 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13695 {
13696 struct regcache *reg_cache = thumb2_insn_r->regcache;
13697 uint32_t l_bit, a_bit, b_bits;
13698 uint32_t record_buf[128], record_buf_mem[128];
13699 uint32_t reg_rn, reg_vd, address, f_elem;
13700 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13701 uint8_t f_ebytes;
13702
13703 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13704 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13705 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13706 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13707 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13708 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13709 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13710 f_elem = 8 / f_ebytes;
13711
13712 if (!l_bit)
13713 {
13714 ULONGEST u_regval = 0;
13715 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13716 address = u_regval;
13717
13718 if (!a_bit)
13719 {
13720 /* Handle VST1. */
13721 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13722 {
13723 if (b_bits == 0x07)
13724 bf_regs = 1;
13725 else if (b_bits == 0x0a)
13726 bf_regs = 2;
13727 else if (b_bits == 0x06)
13728 bf_regs = 3;
13729 else if (b_bits == 0x02)
13730 bf_regs = 4;
13731 else
13732 bf_regs = 0;
13733
13734 for (index_r = 0; index_r < bf_regs; index_r++)
13735 {
13736 for (index_e = 0; index_e < f_elem; index_e++)
13737 {
13738 record_buf_mem[index_m++] = f_ebytes;
13739 record_buf_mem[index_m++] = address;
13740 address = address + f_ebytes;
13741 thumb2_insn_r->mem_rec_count += 1;
13742 }
13743 }
13744 }
13745 /* Handle VST2. */
13746 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13747 {
13748 if (b_bits == 0x09 || b_bits == 0x08)
13749 bf_regs = 1;
13750 else if (b_bits == 0x03)
13751 bf_regs = 2;
13752 else
13753 bf_regs = 0;
13754
13755 for (index_r = 0; index_r < bf_regs; index_r++)
13756 for (index_e = 0; index_e < f_elem; index_e++)
13757 {
13758 for (loop_t = 0; loop_t < 2; loop_t++)
13759 {
13760 record_buf_mem[index_m++] = f_ebytes;
13761 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13762 thumb2_insn_r->mem_rec_count += 1;
13763 }
13764 address = address + (2 * f_ebytes);
13765 }
13766 }
13767 /* Handle VST3. */
13768 else if ((b_bits & 0x0e) == 0x04)
13769 {
13770 for (index_e = 0; index_e < f_elem; index_e++)
13771 {
13772 for (loop_t = 0; loop_t < 3; loop_t++)
13773 {
13774 record_buf_mem[index_m++] = f_ebytes;
13775 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13776 thumb2_insn_r->mem_rec_count += 1;
13777 }
13778 address = address + (3 * f_ebytes);
13779 }
13780 }
13781 /* Handle VST4. */
13782 else if (!(b_bits & 0x0e))
13783 {
13784 for (index_e = 0; index_e < f_elem; index_e++)
13785 {
13786 for (loop_t = 0; loop_t < 4; loop_t++)
13787 {
13788 record_buf_mem[index_m++] = f_ebytes;
13789 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13790 thumb2_insn_r->mem_rec_count += 1;
13791 }
13792 address = address + (4 * f_ebytes);
13793 }
13794 }
13795 }
13796 else
13797 {
13798 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13799
13800 if (bft_size == 0x00)
13801 f_ebytes = 1;
13802 else if (bft_size == 0x01)
13803 f_ebytes = 2;
13804 else if (bft_size == 0x02)
13805 f_ebytes = 4;
13806 else
13807 f_ebytes = 0;
13808
13809 /* Handle VST1. */
13810 if (!(b_bits & 0x0b) || b_bits == 0x08)
13811 thumb2_insn_r->mem_rec_count = 1;
13812 /* Handle VST2. */
13813 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13814 thumb2_insn_r->mem_rec_count = 2;
13815 /* Handle VST3. */
13816 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13817 thumb2_insn_r->mem_rec_count = 3;
13818 /* Handle VST4. */
13819 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13820 thumb2_insn_r->mem_rec_count = 4;
13821
13822 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13823 {
13824 record_buf_mem[index_m] = f_ebytes;
13825 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13826 }
13827 }
13828 }
13829 else
13830 {
13831 if (!a_bit)
13832 {
13833 /* Handle VLD1. */
13834 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13835 thumb2_insn_r->reg_rec_count = 1;
13836 /* Handle VLD2. */
13837 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13838 thumb2_insn_r->reg_rec_count = 2;
13839 /* Handle VLD3. */
13840 else if ((b_bits & 0x0e) == 0x04)
13841 thumb2_insn_r->reg_rec_count = 3;
13842 /* Handle VLD4. */
13843 else if (!(b_bits & 0x0e))
13844 thumb2_insn_r->reg_rec_count = 4;
13845 }
13846 else
13847 {
13848 /* Handle VLD1. */
13849 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13850 thumb2_insn_r->reg_rec_count = 1;
13851 /* Handle VLD2. */
13852 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13853 thumb2_insn_r->reg_rec_count = 2;
13854 /* Handle VLD3. */
13855 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13856 thumb2_insn_r->reg_rec_count = 3;
13857 /* Handle VLD4. */
13858 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13859 thumb2_insn_r->reg_rec_count = 4;
13860
13861 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13862 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13863 }
13864 }
13865
13866 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13867 {
13868 record_buf[index_r] = reg_rn;
13869 thumb2_insn_r->reg_rec_count += 1;
13870 }
13871
13872 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13873 record_buf);
13874 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13875 record_buf_mem);
13876 return 0;
13877 }
13878
13879 /* Decodes thumb2 instruction type and invokes its record handler. */
13880
13881 static unsigned int
13882 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13883 {
13884 uint32_t op, op1, op2;
13885
13886 op = bit (thumb2_insn_r->arm_insn, 15);
13887 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13888 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13889
13890 if (op1 == 0x01)
13891 {
13892 if (!(op2 & 0x64 ))
13893 {
13894 /* Load/store multiple instruction. */
13895 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13896 }
13897 else if ((op2 & 0x64) == 0x4)
13898 {
13899 /* Load/store (dual/exclusive) and table branch instruction. */
13900 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13901 }
13902 else if ((op2 & 0x60) == 0x20)
13903 {
13904 /* Data-processing (shifted register). */
13905 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13906 }
13907 else if (op2 & 0x40)
13908 {
13909 /* Co-processor instructions. */
13910 return thumb2_record_coproc_insn (thumb2_insn_r);
13911 }
13912 }
13913 else if (op1 == 0x02)
13914 {
13915 if (op)
13916 {
13917 /* Branches and miscellaneous control instructions. */
13918 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13919 }
13920 else if (op2 & 0x20)
13921 {
13922 /* Data-processing (plain binary immediate) instruction. */
13923 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13924 }
13925 else
13926 {
13927 /* Data-processing (modified immediate). */
13928 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13929 }
13930 }
13931 else if (op1 == 0x03)
13932 {
13933 if (!(op2 & 0x71 ))
13934 {
13935 /* Store single data item. */
13936 return thumb2_record_str_single_data (thumb2_insn_r);
13937 }
13938 else if (!((op2 & 0x71) ^ 0x10))
13939 {
13940 /* Advanced SIMD or structure load/store instructions. */
13941 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13942 }
13943 else if (!((op2 & 0x67) ^ 0x01))
13944 {
13945 /* Load byte, memory hints instruction. */
13946 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13947 }
13948 else if (!((op2 & 0x67) ^ 0x03))
13949 {
13950 /* Load halfword, memory hints instruction. */
13951 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13952 }
13953 else if (!((op2 & 0x67) ^ 0x05))
13954 {
13955 /* Load word instruction. */
13956 return thumb2_record_ld_word (thumb2_insn_r);
13957 }
13958 else if (!((op2 & 0x70) ^ 0x20))
13959 {
13960 /* Data-processing (register) instruction. */
13961 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13962 }
13963 else if (!((op2 & 0x78) ^ 0x30))
13964 {
13965 /* Multiply, multiply accumulate, abs diff instruction. */
13966 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13967 }
13968 else if (!((op2 & 0x78) ^ 0x38))
13969 {
13970 /* Long multiply, long multiply accumulate, and divide. */
13971 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13972 }
13973 else if (op2 & 0x40)
13974 {
13975 /* Co-processor instructions. */
13976 return thumb2_record_coproc_insn (thumb2_insn_r);
13977 }
13978 }
13979
13980 return -1;
13981 }
13982
13983 namespace {
13984 /* Abstract memory reader. */
13985
13986 class abstract_memory_reader
13987 {
13988 public:
13989 /* Read LEN bytes of target memory at address MEMADDR, placing the
13990 results in GDB's memory at BUF. Return true on success. */
13991
13992 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13993 };
13994
13995 /* Instruction reader from real target. */
13996
13997 class instruction_reader : public abstract_memory_reader
13998 {
13999 public:
14000 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
14001 {
14002 if (target_read_memory (memaddr, buf, len))
14003 return false;
14004 else
14005 return true;
14006 }
14007 };
14008
14009 } // namespace
14010
14011 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
14012 and positive val on failure. */
14013
14014 static int
14015 extract_arm_insn (abstract_memory_reader& reader,
14016 insn_decode_record *insn_record, uint32_t insn_size)
14017 {
14018 gdb_byte buf[insn_size];
14019
14020 memset (&buf[0], 0, insn_size);
14021
14022 if (!reader.read (insn_record->this_addr, buf, insn_size))
14023 return 1;
14024 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
14025 insn_size,
14026 gdbarch_byte_order_for_code (insn_record->gdbarch));
14027 return 0;
14028 }
14029
14030 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
14031
14032 /* Decode arm/thumb insn depending on condition cods and opcodes; and
14033 dispatch it. */
14034
14035 static int
14036 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
14037 record_type_t record_type, uint32_t insn_size)
14038 {
14039
14040 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
14041 instruction. */
14042 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14043 {
14044 arm_record_data_proc_misc_ld_str, /* 000. */
14045 arm_record_data_proc_imm, /* 001. */
14046 arm_record_ld_st_imm_offset, /* 010. */
14047 arm_record_ld_st_reg_offset, /* 011. */
14048 arm_record_ld_st_multiple, /* 100. */
14049 arm_record_b_bl, /* 101. */
14050 arm_record_asimd_vfp_coproc, /* 110. */
14051 arm_record_coproc_data_proc /* 111. */
14052 };
14053
14054 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
14055 instruction. */
14056 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14057 { \
14058 thumb_record_shift_add_sub, /* 000. */
14059 thumb_record_add_sub_cmp_mov, /* 001. */
14060 thumb_record_ld_st_reg_offset, /* 010. */
14061 thumb_record_ld_st_imm_offset, /* 011. */
14062 thumb_record_ld_st_stack, /* 100. */
14063 thumb_record_misc, /* 101. */
14064 thumb_record_ldm_stm_swi, /* 110. */
14065 thumb_record_branch /* 111. */
14066 };
14067
14068 uint32_t ret = 0; /* return value: negative:failure 0:success. */
14069 uint32_t insn_id = 0;
14070
14071 if (extract_arm_insn (reader, arm_record, insn_size))
14072 {
14073 if (record_debug)
14074 {
14075 gdb_printf (gdb_stdlog,
14076 _("Process record: error reading memory at "
14077 "addr %s len = %d.\n"),
14078 paddress (arm_record->gdbarch,
14079 arm_record->this_addr), insn_size);
14080 }
14081 return -1;
14082 }
14083 else if (ARM_RECORD == record_type)
14084 {
14085 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14086 insn_id = bits (arm_record->arm_insn, 25, 27);
14087
14088 if (arm_record->cond == 0xf)
14089 ret = arm_record_extension_space (arm_record);
14090 else
14091 {
14092 /* If this insn has fallen into extension space
14093 then we need not decode it anymore. */
14094 ret = arm_handle_insn[insn_id] (arm_record);
14095 }
14096 if (ret != ARM_RECORD_SUCCESS)
14097 {
14098 arm_record_unsupported_insn (arm_record);
14099 ret = -1;
14100 }
14101 }
14102 else if (THUMB_RECORD == record_type)
14103 {
14104 /* As thumb does not have condition codes, we set negative. */
14105 arm_record->cond = -1;
14106 insn_id = bits (arm_record->arm_insn, 13, 15);
14107 ret = thumb_handle_insn[insn_id] (arm_record);
14108 if (ret != ARM_RECORD_SUCCESS)
14109 {
14110 arm_record_unsupported_insn (arm_record);
14111 ret = -1;
14112 }
14113 }
14114 else if (THUMB2_RECORD == record_type)
14115 {
14116 /* As thumb does not have condition codes, we set negative. */
14117 arm_record->cond = -1;
14118
14119 /* Swap first half of 32bit thumb instruction with second half. */
14120 arm_record->arm_insn
14121 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
14122
14123 ret = thumb2_record_decode_insn_handler (arm_record);
14124
14125 if (ret != ARM_RECORD_SUCCESS)
14126 {
14127 arm_record_unsupported_insn (arm_record);
14128 ret = -1;
14129 }
14130 }
14131 else
14132 {
14133 /* Throw assertion. */
14134 gdb_assert_not_reached ("not a valid instruction, could not decode");
14135 }
14136
14137 return ret;
14138 }
14139
14140 #if GDB_SELF_TEST
14141 namespace selftests {
14142
14143 /* Provide both 16-bit and 32-bit thumb instructions. */
14144
14145 class instruction_reader_thumb : public abstract_memory_reader
14146 {
14147 public:
14148 template<size_t SIZE>
14149 instruction_reader_thumb (enum bfd_endian endian,
14150 const uint16_t (&insns)[SIZE])
14151 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
14152 {}
14153
14154 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
14155 {
14156 SELF_CHECK (len == 4 || len == 2);
14157 SELF_CHECK (memaddr % 2 == 0);
14158 SELF_CHECK ((memaddr / 2) < m_insns_size);
14159
14160 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
14161 if (len == 4)
14162 {
14163 store_unsigned_integer (&buf[2], 2, m_endian,
14164 m_insns[memaddr / 2 + 1]);
14165 }
14166 return true;
14167 }
14168
14169 private:
14170 enum bfd_endian m_endian;
14171 const uint16_t *m_insns;
14172 size_t m_insns_size;
14173 };
14174
14175 static void
14176 arm_record_test (void)
14177 {
14178 struct gdbarch_info info;
14179 info.bfd_arch_info = bfd_scan_arch ("arm");
14180
14181 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14182
14183 SELF_CHECK (gdbarch != NULL);
14184
14185 /* 16-bit Thumb instructions. */
14186 {
14187 insn_decode_record arm_record;
14188
14189 memset (&arm_record, 0, sizeof (insn_decode_record));
14190 arm_record.gdbarch = gdbarch;
14191
14192 static const uint16_t insns[] = {
14193 /* db b2 uxtb r3, r3 */
14194 0xb2db,
14195 /* cd 58 ldr r5, [r1, r3] */
14196 0x58cd,
14197 };
14198
14199 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
14200 instruction_reader_thumb reader (endian, insns);
14201 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14202 THUMB_INSN_SIZE_BYTES);
14203
14204 SELF_CHECK (ret == 0);
14205 SELF_CHECK (arm_record.mem_rec_count == 0);
14206 SELF_CHECK (arm_record.reg_rec_count == 1);
14207 SELF_CHECK (arm_record.arm_regs[0] == 3);
14208
14209 arm_record.this_addr += 2;
14210 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14211 THUMB_INSN_SIZE_BYTES);
14212
14213 SELF_CHECK (ret == 0);
14214 SELF_CHECK (arm_record.mem_rec_count == 0);
14215 SELF_CHECK (arm_record.reg_rec_count == 1);
14216 SELF_CHECK (arm_record.arm_regs[0] == 5);
14217 }
14218
14219 /* 32-bit Thumb-2 instructions. */
14220 {
14221 insn_decode_record arm_record;
14222
14223 memset (&arm_record, 0, sizeof (insn_decode_record));
14224 arm_record.gdbarch = gdbarch;
14225
14226 static const uint16_t insns[] = {
14227 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
14228 0xee1d, 0x7f70,
14229 };
14230
14231 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
14232 instruction_reader_thumb reader (endian, insns);
14233 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14234 THUMB2_INSN_SIZE_BYTES);
14235
14236 SELF_CHECK (ret == 0);
14237 SELF_CHECK (arm_record.mem_rec_count == 0);
14238 SELF_CHECK (arm_record.reg_rec_count == 1);
14239 SELF_CHECK (arm_record.arm_regs[0] == 7);
14240 }
14241 }
14242
14243 /* Instruction reader from manually cooked instruction sequences. */
14244
14245 class test_arm_instruction_reader : public arm_instruction_reader
14246 {
14247 public:
14248 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14249 : m_insns (insns)
14250 {}
14251
14252 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14253 {
14254 SELF_CHECK (memaddr % 4 == 0);
14255 SELF_CHECK (memaddr / 4 < m_insns.size ());
14256
14257 return m_insns[memaddr / 4];
14258 }
14259
14260 private:
14261 const gdb::array_view<const uint32_t> m_insns;
14262 };
14263
14264 static void
14265 arm_analyze_prologue_test ()
14266 {
14267 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14268 {
14269 struct gdbarch_info info;
14270 info.byte_order = endianness;
14271 info.byte_order_for_code = endianness;
14272 info.bfd_arch_info = bfd_scan_arch ("arm");
14273
14274 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14275
14276 SELF_CHECK (gdbarch != NULL);
14277
14278 /* The "sub" instruction contains an immediate value rotate count of 0,
14279 which resulted in a 32-bit shift of a 32-bit value, caught by
14280 UBSan. */
14281 const uint32_t insns[] = {
14282 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14283 0xe1a05000, /* mov r5, r0 */
14284 0xe5903020, /* ldr r3, [r0, #32] */
14285 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14286 };
14287
14288 test_arm_instruction_reader mem_reader (insns);
14289 arm_prologue_cache cache;
14290 arm_cache_init (&cache, gdbarch);
14291
14292 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14293 }
14294 }
14295
14296 } // namespace selftests
14297 #endif /* GDB_SELF_TEST */
14298
14299 /* Cleans up local record registers and memory allocations. */
14300
14301 static void
14302 deallocate_reg_mem (insn_decode_record *record)
14303 {
14304 xfree (record->arm_regs);
14305 xfree (record->arm_mems);
14306 }
14307
14308
14309 /* Parse the current instruction and record the values of the registers and
14310 memory that will be changed in current instruction to record_arch_list".
14311 Return -1 if something is wrong. */
14312
14313 int
14314 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14315 CORE_ADDR insn_addr)
14316 {
14317
14318 uint32_t no_of_rec = 0;
14319 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
14320 ULONGEST t_bit = 0, insn_id = 0;
14321
14322 ULONGEST u_regval = 0;
14323
14324 insn_decode_record arm_record;
14325
14326 memset (&arm_record, 0, sizeof (insn_decode_record));
14327 arm_record.regcache = regcache;
14328 arm_record.this_addr = insn_addr;
14329 arm_record.gdbarch = gdbarch;
14330
14331
14332 if (record_debug > 1)
14333 {
14334 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14335 "addr = %s\n",
14336 paddress (gdbarch, arm_record.this_addr));
14337 }
14338
14339 instruction_reader reader;
14340 if (extract_arm_insn (reader, &arm_record, 2))
14341 {
14342 if (record_debug)
14343 {
14344 gdb_printf (gdb_stdlog,
14345 _("Process record: error reading memory at "
14346 "addr %s len = %d.\n"),
14347 paddress (arm_record.gdbarch,
14348 arm_record.this_addr), 2);
14349 }
14350 return -1;
14351 }
14352
14353 /* Check the insn, whether it is thumb or arm one. */
14354
14355 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14356 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14357
14358
14359 if (!(u_regval & t_bit))
14360 {
14361 /* We are decoding arm insn. */
14362 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14363 }
14364 else
14365 {
14366 insn_id = bits (arm_record.arm_insn, 11, 15);
14367 /* is it thumb2 insn? */
14368 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14369 {
14370 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14371 THUMB2_INSN_SIZE_BYTES);
14372 }
14373 else
14374 {
14375 /* We are decoding thumb insn. */
14376 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14377 THUMB_INSN_SIZE_BYTES);
14378 }
14379 }
14380
14381 if (0 == ret)
14382 {
14383 /* Record registers. */
14384 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14385 if (arm_record.arm_regs)
14386 {
14387 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14388 {
14389 if (record_full_arch_list_add_reg
14390 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14391 ret = -1;
14392 }
14393 }
14394 /* Record memories. */
14395 if (arm_record.arm_mems)
14396 {
14397 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14398 {
14399 if (record_full_arch_list_add_mem
14400 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14401 arm_record.arm_mems[no_of_rec].len))
14402 ret = -1;
14403 }
14404 }
14405
14406 if (record_full_arch_list_add_end ())
14407 ret = -1;
14408 }
14409
14410
14411 deallocate_reg_mem (&arm_record);
14412
14413 return ret;
14414 }
14415
14416 /* See arm-tdep.h. */
14417
14418 const target_desc *
14419 arm_read_description (arm_fp_type fp_type)
14420 {
14421 struct target_desc *tdesc = tdesc_arm_list[fp_type];
14422
14423 if (tdesc == nullptr)
14424 {
14425 tdesc = arm_create_target_description (fp_type);
14426 tdesc_arm_list[fp_type] = tdesc;
14427 }
14428
14429 return tdesc;
14430 }
14431
14432 /* See arm-tdep.h. */
14433
14434 const target_desc *
14435 arm_read_mprofile_description (arm_m_profile_type m_type)
14436 {
14437 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14438
14439 if (tdesc == nullptr)
14440 {
14441 tdesc = arm_create_mprofile_target_description (m_type);
14442 tdesc_arm_mprofile_list[m_type] = tdesc;
14443 }
14444
14445 return tdesc;
14446 }