1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2.h"
42 #include "dwarf2/frame.h"
43 #include "gdbtypes.h"
44 #include "prologue-value.h"
45 #include "remote.h"
46 #include "target-descriptions.h"
47 #include "user-regs.h"
48 #include "observable.h"
49 #include "count-one-bits.h"
50
51 #include "arch/arm.h"
52 #include "arch/arm-get-next-pcs.h"
53 #include "arm-tdep.h"
54 #include "gdb/sim-arm.h"
55
56 #include "elf-bfd.h"
57 #include "coff/internal.h"
58 #include "elf/arm.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "producer.h"
65
66 #if GDB_SELF_TEST
67 #include "gdbsupport/selftest.h"
68 #endif
69
70 static bool arm_debug;
71
72 /* Print an "arm" debug statement. */
73
74 #define arm_debug_printf(fmt, ...) \
75 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
76
77 /* Macros for setting and testing a bit in a minimal symbol that marks
78 it as a Thumb function. The MSB of the minimal symbol's "info" field
79 is used for this purpose.
80
81 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
82 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
83
84 #define MSYMBOL_SET_SPECIAL(msym) \
85 (msym)->set_target_flag_1 (true)
86
87 #define MSYMBOL_IS_SPECIAL(msym) \
88 (msym)->target_flag_1 ()
89
90 struct arm_mapping_symbol
91 {
92 CORE_ADDR value;
93 char type;
94
95 bool operator< (const arm_mapping_symbol &other) const
96 { return this->value < other.value; }
97 };
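/* Per the ARM ELF ABI, mapping symbols mark the start of a region of ARM
   code ($a), Thumb code ($t) or literal data ($d); the character after the
   '$' is what ends up in the TYPE field above.  */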
98
99 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
100
101 struct arm_per_bfd
102 {
103 explicit arm_per_bfd (size_t num_sections)
104 : section_maps (new arm_mapping_symbol_vec[num_sections]),
105 section_maps_sorted (new bool[num_sections] ())
106 {}
107
108 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
109
110 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111
112 The format is an array of vectors of arm_mapping_symbols; there is one
113 vector for each section of the objfile (the array is indexed by BFD
114 section index).
115
116 For each section, the vector of arm_mapping_symbol is sorted by
117 symbol value (address). */
118 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
119
120 /* For each corresponding element of section_maps above, whether that
121 vector has been sorted. */
122 std::unique_ptr<bool[]> section_maps_sorted;
123 };
124
125 /* Per-bfd data used for mapping symbols. */
126 static bfd_key<arm_per_bfd> arm_bfd_data_key;
127
128 /* The list of available "set arm ..." and "show arm ..." commands. */
129 static struct cmd_list_element *setarmcmdlist = NULL;
130 static struct cmd_list_element *showarmcmdlist = NULL;
131
132 /* The type of floating-point to use. Keep this in sync with enum
133 arm_float_model, and the help string in _initialize_arm_tdep. */
134 static const char *const fp_model_strings[] =
135 {
136 "auto",
137 "softfpa",
138 "fpa",
139 "softvfp",
140 "vfp",
141 NULL
142 };
143
144 /* A variable that can be configured by the user. */
145 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
146 static const char *current_fp_model = "auto";
147
148 /* The ABI to use. Keep this in sync with arm_abi_kind. */
149 static const char *const arm_abi_strings[] =
150 {
151 "auto",
152 "APCS",
153 "AAPCS",
154 NULL
155 };
156
157 /* A variable that can be configured by the user. */
158 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
159 static const char *arm_abi_string = "auto";
160
161 /* The execution mode to assume. */
162 static const char *const arm_mode_strings[] =
163 {
164 "auto",
165 "arm",
166 "thumb",
167 NULL
168 };
169
170 static const char *arm_fallback_mode_string = "auto";
171 static const char *arm_force_mode_string = "auto";
172
173 /* The standard register names, and all the valid aliases for them. Note
174 that `fp', `sp' and `pc' are not added in this alias list, because they
175 have been added as builtin user registers in
176 std-regs.c:_initialize_frame_reg. */
177 static const struct
178 {
179 const char *name;
180 int regnum;
181 } arm_register_aliases[] = {
182 /* Basic register numbers. */
183 { "r0", 0 },
184 { "r1", 1 },
185 { "r2", 2 },
186 { "r3", 3 },
187 { "r4", 4 },
188 { "r5", 5 },
189 { "r6", 6 },
190 { "r7", 7 },
191 { "r8", 8 },
192 { "r9", 9 },
193 { "r10", 10 },
194 { "r11", 11 },
195 { "r12", 12 },
196 { "r13", 13 },
197 { "r14", 14 },
198 { "r15", 15 },
199 /* Synonyms (argument and variable registers). */
200 { "a1", 0 },
201 { "a2", 1 },
202 { "a3", 2 },
203 { "a4", 3 },
204 { "v1", 4 },
205 { "v2", 5 },
206 { "v3", 6 },
207 { "v4", 7 },
208 { "v5", 8 },
209 { "v6", 9 },
210 { "v7", 10 },
211 { "v8", 11 },
212 /* Other platform-specific names for r9. */
213 { "sb", 9 },
214 { "tr", 9 },
215 /* Special names. */
216 { "ip", 12 },
217 { "lr", 14 },
218 /* Names used by GCC (not listed in the ARM EABI). */
219 { "sl", 10 },
220 /* A special name from the older ATPCS. */
221 { "wr", 7 },
222 };
223
224 static const char *const arm_register_names[] =
225 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
226 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
227 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
228 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
229 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
230 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
231 "fps", "cpsr" }; /* 24 25 */
232
233 /* Holds the current set of options to be passed to the disassembler. */
234 static char *arm_disassembler_options;
235
236 /* Valid register name styles. */
237 static const char **valid_disassembly_styles;
238
239 /* Disassembly style to use. Default to "std" register names. */
240 static const char *disassembly_style;
241
242 /* All possible arm target descriptors. */
243 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
244 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
245
246 /* This is used to keep the bfd arch_info in sync with the disassembly
247 style. */
248 static void set_disassembly_style_sfunc (const char *, int,
249 struct cmd_list_element *);
250 static void show_disassembly_style_sfunc (struct ui_file *, int,
251 struct cmd_list_element *,
252 const char *);
253
254 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
255 readable_regcache *regcache,
256 int regnum, gdb_byte *buf);
257 static void arm_neon_quad_write (struct gdbarch *gdbarch,
258 struct regcache *regcache,
259 int regnum, const gdb_byte *buf);
260
261 static CORE_ADDR
262 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
263
264
265 /* get_next_pcs operations. */
266 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
267 arm_get_next_pcs_read_memory_unsigned_integer,
268 arm_get_next_pcs_syscall_next_pc,
269 arm_get_next_pcs_addr_bits_remove,
270 arm_get_next_pcs_is_thumb,
271 NULL,
272 };
273
274 struct arm_prologue_cache
275 {
276 /* The stack pointer at the time this frame was created; i.e. the
277 caller's stack pointer when this function was called. It is used
278 to identify this frame. */
279 CORE_ADDR sp;
280
281 /* Additional stack pointers used by M-profile with Security extension. */
282 /* Use msp_s / psp_s to hold the values of msp / psp when there is
283 no Security extension. */
284 CORE_ADDR msp_s;
285 CORE_ADDR msp_ns;
286 CORE_ADDR psp_s;
287 CORE_ADDR psp_ns;
288
289 /* Active stack pointer. */
290 int active_sp_regnum;
291 int active_msp_regnum;
292 int active_psp_regnum;
293
294 /* The frame base for this frame is just prev_sp - frame size.
295 FRAMESIZE is the distance from the frame pointer to the
296 initial stack pointer. */
297
298 int framesize;
299
300 /* The register used to hold the frame pointer for this frame. */
301 int framereg;
302
303 /* True if the return address is signed, false otherwise. */
304 gdb::optional<bool> ra_signed_state;
305
306 /* Saved register offsets. */
307 trad_frame_saved_reg *saved_regs;
308
309 arm_prologue_cache() = default;
310 };
311
312
313 /* Reconstruct T bit in program status register from LR value. */
314
315 static inline ULONGEST
316 reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
317 {
318 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
319 if (IS_THUMB_ADDR (lr))
320 psr |= t_bit;
321 else
322 psr &= ~t_bit;
323
324 return psr;
325 }
326
327 /* Initialize stack pointers, and flag the active one. */
328
329 static inline void
330 arm_cache_init_sp (int regnum, CORE_ADDR* member,
331 struct arm_prologue_cache *cache,
332 struct frame_info *frame)
333 {
334 CORE_ADDR val = get_frame_register_unsigned (frame, regnum);
335 if (val == cache->sp)
336 cache->active_sp_regnum = regnum;
337
338 *member = val;
339 }
340
341 /* Initialize CACHE fields for which zero is not adequate (CACHE is
342 expected to have been ZALLOC'ed before calling this function). */
343
344 static void
345 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
346 {
347 cache->active_sp_regnum = ARM_SP_REGNUM;
348
349 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
350 }
351
352 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
353
354 static void
355 arm_cache_init (struct arm_prologue_cache *cache, struct frame_info *frame)
356 {
357 struct gdbarch *gdbarch = get_frame_arch (frame);
358 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
359
360 arm_cache_init (cache, gdbarch);
361 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
362
363 if (tdep->have_sec_ext)
364 {
365 CORE_ADDR msp_val = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
366 CORE_ADDR psp_val = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
367
368 arm_cache_init_sp (tdep->m_profile_msp_s_regnum, &cache->msp_s, cache, frame);
369 arm_cache_init_sp (tdep->m_profile_psp_s_regnum, &cache->psp_s, cache, frame);
370 arm_cache_init_sp (tdep->m_profile_msp_ns_regnum, &cache->msp_ns, cache, frame);
371 arm_cache_init_sp (tdep->m_profile_psp_ns_regnum, &cache->psp_ns, cache, frame);
372
373 if (msp_val == cache->msp_s)
374 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
375 else if (msp_val == cache->msp_ns)
376 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
377 if (psp_val == cache->psp_s)
378 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
379 else if (psp_val == cache->psp_ns)
380 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
381
382 /* Use MSP_S as default stack pointer. */
383 if (cache->active_sp_regnum == ARM_SP_REGNUM)
384 cache->active_sp_regnum = tdep->m_profile_msp_s_regnum;
385 }
386 else if (tdep->is_m)
387 {
388 arm_cache_init_sp (tdep->m_profile_msp_regnum, &cache->msp_s, cache, frame);
389 arm_cache_init_sp (tdep->m_profile_psp_regnum, &cache->psp_s, cache, frame);
390 }
391 else
392 arm_cache_init_sp (ARM_SP_REGNUM, &cache->msp_s, cache, frame);
393 }
394
395 /* Return the requested stack pointer value (in REGNUM), taking into
396 account whether we have a Security extension or an M-profile
397 CPU. */
398
399 static CORE_ADDR
400 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
401 arm_gdbarch_tdep *tdep, int regnum)
402 {
403 if (tdep->have_sec_ext)
404 {
405 if (regnum == tdep->m_profile_msp_s_regnum)
406 return cache->msp_s;
407 if (regnum == tdep->m_profile_msp_ns_regnum)
408 return cache->msp_ns;
409 if (regnum == tdep->m_profile_psp_s_regnum)
410 return cache->psp_s;
411 if (regnum == tdep->m_profile_psp_ns_regnum)
412 return cache->psp_ns;
413 if (regnum == tdep->m_profile_msp_regnum)
414 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
415 if (regnum == tdep->m_profile_psp_regnum)
416 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
417 if (regnum == ARM_SP_REGNUM)
418 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
419 }
420 else if (tdep->is_m)
421 {
422 if (regnum == tdep->m_profile_msp_regnum)
423 return cache->msp_s;
424 if (regnum == tdep->m_profile_psp_regnum)
425 return cache->psp_s;
426 if (regnum == ARM_SP_REGNUM)
427 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
428 }
429 else if (regnum == ARM_SP_REGNUM)
430 return cache->sp;
431
432 gdb_assert_not_reached ("Invalid SP selection");
433 }
434
435 /* Return the previous stack address, depending on which SP register
436 is active. */
437
438 static CORE_ADDR
439 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
440 {
441 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
442 return val;
443 }
444
445 /* Set the active stack pointer to VAL. */
446
447 static void
448 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
449 arm_gdbarch_tdep *tdep, CORE_ADDR val)
450 {
451 if (tdep->have_sec_ext)
452 {
453 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
454 cache->msp_s = val;
455 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
456 cache->msp_ns = val;
457 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
458 cache->psp_s = val;
459 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
460 cache->psp_ns = val;
461
462 return;
463 }
464 else if (tdep->is_m)
465 {
466 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
467 cache->msp_s = val;
468 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
469 cache->psp_s = val;
470
471 return;
472 }
473 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
474 {
475 cache->sp = val;
476 return;
477 }
478
479 gdb_assert_not_reached ("Invalid SP selection");
480 }
481
482 /* Return true if REGNUM is one of the stack pointers. */
483
484 static bool
485 arm_cache_is_sp_register (struct arm_prologue_cache *cache,
486 arm_gdbarch_tdep *tdep, int regnum)
487 {
488 if ((regnum == ARM_SP_REGNUM)
489 || (regnum == tdep->m_profile_msp_regnum)
490 || (regnum == tdep->m_profile_msp_s_regnum)
491 || (regnum == tdep->m_profile_msp_ns_regnum)
492 || (regnum == tdep->m_profile_psp_regnum)
493 || (regnum == tdep->m_profile_psp_s_regnum)
494 || (regnum == tdep->m_profile_psp_ns_regnum))
495 return true;
496 else
497 return false;
498 }
499
500 /* Set the active stack pointer to SP_REGNUM. */
501
502 static void
503 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
504 arm_gdbarch_tdep *tdep, int sp_regnum)
505 {
506 gdb_assert (sp_regnum != ARM_SP_REGNUM);
507 gdb_assert (arm_cache_is_sp_register (cache, tdep, sp_regnum));
508
509 if (tdep->have_sec_ext)
510 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
511 && sp_regnum != tdep->m_profile_psp_regnum);
512
513 cache->active_sp_regnum = sp_regnum;
514 }
515
516 namespace {
517
518 /* Abstract class to read ARM instructions from memory. */
519
520 class arm_instruction_reader
521 {
522 public:
523 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
524 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
525 };
526
527 /* Read instructions from target memory. */
528
529 class target_arm_instruction_reader : public arm_instruction_reader
530 {
531 public:
532 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
533 {
534 return read_code_unsigned_integer (memaddr, 4, byte_order);
535 }
536 };
537
538 } /* namespace */
539
540 static CORE_ADDR arm_analyze_prologue
541 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
542 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
543
544 /* Architecture version for displaced stepping. This affects the behaviour of
545 certain instructions, and really should not be hard-wired. */
546
547 #define DISPLACED_STEPPING_ARCH_VERSION 5
548
549 /* See arm-tdep.h. */
550
551 bool arm_apcs_32 = true;
552 bool arm_unwind_secure_frames = true;
553
554 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
555
556 int
557 arm_psr_thumb_bit (struct gdbarch *gdbarch)
558 {
559 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
560
561 if (tdep->is_m)
562 return XPSR_T;
563 else
564 return CPSR_T;
565 }
566
567 /* Determine if the processor is currently executing in Thumb mode. */
568
569 int
570 arm_is_thumb (struct regcache *regcache)
571 {
572 ULONGEST cpsr;
573 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
574
575 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
576
577 return (cpsr & t_bit) != 0;
578 }
579
580 /* Determine if FRAME is executing in Thumb mode. */
581
582 int
583 arm_frame_is_thumb (struct frame_info *frame)
584 {
585 CORE_ADDR cpsr;
586 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
587
588 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
589 directly (from a signal frame or dummy frame) or by interpreting
590 the saved LR (from a prologue or DWARF frame). So consult it and
591 trust the unwinders. */
592 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
593
594 return (cpsr & t_bit) != 0;
595 }
596
597 /* Search for the mapping symbol covering MEMADDR. If one is found,
598 return its type. Otherwise, return 0. If START is non-NULL,
599 set *START to the location of the mapping symbol. */
600
601 static char
602 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
603 {
604 struct obj_section *sec;
605
606 /* If there are mapping symbols, consult them. */
607 sec = find_pc_section (memaddr);
608 if (sec != NULL)
609 {
610 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
611 if (data != NULL)
612 {
613 unsigned int section_idx = sec->the_bfd_section->index;
614 arm_mapping_symbol_vec &map
615 = data->section_maps[section_idx];
616
617 /* Sort the vector on first use. */
618 if (!data->section_maps_sorted[section_idx])
619 {
620 std::sort (map.begin (), map.end ());
621 data->section_maps_sorted[section_idx] = true;
622 }
623
624 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
625 arm_mapping_symbol_vec::const_iterator it
626 = std::lower_bound (map.begin (), map.end (), map_key);
627
628 /* std::lower_bound finds the earliest ordered insertion
629 point. If the symbol at this position starts at this exact
630 address, we use that; otherwise, the preceding
631 mapping symbol covers this address. */
632 if (it < map.end ())
633 {
634 if (it->value == map_key.value)
635 {
636 if (start)
637 *start = it->value + sec->addr ();
638 return it->type;
639 }
640 }
641
642 if (it > map.begin ())
643 {
644 arm_mapping_symbol_vec::const_iterator prev_it
645 = it - 1;
646
647 if (start)
648 *start = prev_it->value + sec->addr ();
649 return prev_it->type;
650 }
651 }
652 }
653
654 return 0;
655 }
656
657 /* Determine if the program counter specified in MEMADDR is in a Thumb
658 function. This function should be called for addresses unrelated to
659 any executing frame; otherwise, prefer arm_frame_is_thumb. */
660
661 int
662 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
663 {
664 struct bound_minimal_symbol sym;
665 char type;
666 arm_displaced_step_copy_insn_closure *dsc = nullptr;
667 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
668
669 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
670 dsc = ((arm_displaced_step_copy_insn_closure * )
671 gdbarch_displaced_step_copy_insn_closure_by_addr
672 (gdbarch, current_inferior (), memaddr));
673
674 /* If we are checking the mode of a displaced instruction in the copy area,
675 the mode should be determined by the instruction at the original address. */
676 if (dsc)
677 {
678 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
679 (unsigned long) dsc->insn_addr,
680 (unsigned long) memaddr);
681 memaddr = dsc->insn_addr;
682 }
683
684 /* If bit 0 of the address is set, assume this is a Thumb address. */
685 if (IS_THUMB_ADDR (memaddr))
686 return 1;
687
688 /* If the user wants to override the symbol table, let him. */
689 if (strcmp (arm_force_mode_string, "arm") == 0)
690 return 0;
691 if (strcmp (arm_force_mode_string, "thumb") == 0)
692 return 1;
693
694 /* ARM v6-M and v7-M are always in Thumb mode. */
695 if (tdep->is_m)
696 return 1;
697
698 /* If there are mapping symbols, consult them. */
699 type = arm_find_mapping_symbol (memaddr, NULL);
700 if (type)
701 return type == 't';
702
703 /* Thumb functions have a "special" bit set in minimal symbols. */
704 sym = lookup_minimal_symbol_by_pc (memaddr);
705 if (sym.minsym)
706 return (MSYMBOL_IS_SPECIAL (sym.minsym));
707
708 /* If the user wants to override the fallback mode, let them. */
709 if (strcmp (arm_fallback_mode_string, "arm") == 0)
710 return 0;
711 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
712 return 1;
713
714 /* If we couldn't find any symbol, but we're talking to a running
715 target, then trust the current value of $cpsr. This lets
716 "display/i $pc" always show the correct mode (though if there is
717 a symbol table we will not reach here, so it still may not be
718 displayed in the mode it will be executed). */
719 if (target_has_registers ())
720 return arm_frame_is_thumb (get_current_frame ());
721
722 /* Otherwise we're out of luck; we assume ARM. */
723 return 0;
724 }
725
726 /* Determine if the address specified equals any of these magic return
727 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
728 architectures.
729
730 From ARMv6-M Reference Manual B1.5.8
731 Table B1-5 Exception return behavior
732
733 EXC_RETURN Return To Return Stack
734 0xFFFFFFF1 Handler mode Main
735 0xFFFFFFF9 Thread mode Main
736 0xFFFFFFFD Thread mode Process
737
738 From ARMv7-M Reference Manual B1.5.8
739 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
740
741 EXC_RETURN Return To Return Stack
742 0xFFFFFFF1 Handler mode Main
743 0xFFFFFFF9 Thread mode Main
744 0xFFFFFFFD Thread mode Process
745
746 Table B1-9 EXC_RETURN definition of exception return behavior, with
747 FP
748
749 EXC_RETURN Return To Return Stack Frame Type
750 0xFFFFFFE1 Handler mode Main Extended
751 0xFFFFFFE9 Thread mode Main Extended
752 0xFFFFFFED Thread mode Process Extended
753 0xFFFFFFF1 Handler mode Main Basic
754 0xFFFFFFF9 Thread mode Main Basic
755 0xFFFFFFFD Thread mode Process Basic
756
757 For more details see "B1.5.8 Exception return behavior"
758 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
759
760 The ARMv8-M Architecture Technical Reference also adds the following,
761 for implementations without the Security Extension:
762
763 EXC_RETURN Condition
764 0xFFFFFFB0 Return to Handler mode.
765 0xFFFFFFB8 Return to Thread mode using the main stack.
766 0xFFFFFFBC Return to Thread mode using the process stack. */
767
768 static int
769 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
770 {
771 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
772 if (tdep->have_sec_ext)
773 {
774 switch ((addr & 0xff000000))
775 {
776 case 0xff000000: /* EXC_RETURN pattern. */
777 case 0xfe000000: /* FNC_RETURN pattern. */
778 return 1;
779 default:
780 return 0;
781 }
782 }
783 else
784 {
785 switch (addr)
786 {
787 /* Values from ARMv8-M Architecture Technical Reference. */
788 case 0xffffffb0:
789 case 0xffffffb8:
790 case 0xffffffbc:
791 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
792 the exception return behavior. */
793 case 0xffffffe1:
794 case 0xffffffe9:
795 case 0xffffffed:
796 case 0xfffffff1:
797 case 0xfffffff9:
798 case 0xfffffffd:
799 /* Address is magic. */
800 return 1;
801
802 default:
803 /* Address is not magic. */
804 return 0;
805 }
806 }
807 }
808
809 /* Remove useless bits from addresses in a running program. */
810 static CORE_ADDR
811 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
812 {
813 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
814
815 /* On M-profile devices, do not strip the low bit from EXC_RETURN
816 (the magic exception return address). */
817 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
818 return val;
819
820 if (arm_apcs_32)
821 return UNMAKE_THUMB_ADDR (val);
822 else
823 return (val & 0x03fffffc);
824 }
825
826 /* Return 1 if PC is the start of a compiler helper function which
827 can be safely ignored during prologue skipping. IS_THUMB is true
828 if the function is known to be a Thumb function due to the way it
829 is being called. */
830 static int
831 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
832 {
833 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
834 struct bound_minimal_symbol msym;
835
836 msym = lookup_minimal_symbol_by_pc (pc);
837 if (msym.minsym != NULL
838 && msym.value_address () == pc
839 && msym.minsym->linkage_name () != NULL)
840 {
841 const char *name = msym.minsym->linkage_name ();
842
843 /* The GNU linker's Thumb call stub to foo is named
844 __foo_from_thumb. */
845 if (strstr (name, "_from_thumb") != NULL)
846 name += 2;
847
848 /* On soft-float targets, __truncdfsf2 is called to convert promoted
849 arguments to their argument types in non-prototyped
850 functions. */
851 if (startswith (name, "__truncdfsf2"))
852 return 1;
853 if (startswith (name, "__aeabi_d2f"))
854 return 1;
855
856 /* Internal functions related to thread-local storage. */
857 if (startswith (name, "__tls_get_addr"))
858 return 1;
859 if (startswith (name, "__aeabi_read_tp"))
860 return 1;
861 }
862 else
863 {
864 /* If we run against a stripped glibc, we may be unable to identify
865 special functions by name. Check for one important case,
866 __aeabi_read_tp, by comparing the *code* against the default
867 implementation (this is hand-written ARM assembler in glibc). */
868
869 if (!is_thumb
870 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
871 == 0xe3e00a0f /* mov r0, #0xffff0fff */
872 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
873 == 0xe240f01f) /* sub pc, r0, #31 */
874 return 1;
875 }
876
877 return 0;
878 }
879
880 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
881 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
882 the instruction. */
883 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
884 ((bits ((insn1), 0, 3) << 12) \
885 | (bits ((insn1), 10, 10) << 11) \
886 | (bits ((insn2), 12, 14) << 8) \
887 | bits ((insn2), 0, 7))
888
889 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
890 the 32-bit instruction. */
891 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
892 ((bits ((insn), 16, 19) << 12) \
893 | bits ((insn), 0, 11))
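/* For example, with encoding T, "movw r0, #0x1234" is the halfword pair
   INSN1 = 0xf241, INSN2 = 0x2034, and EXTRACT_MOVW_MOVT_IMM_T yields
   (0x1 << 12) | (0x0 << 11) | (0x2 << 8) | 0x34 = 0x1234.  With encoding A
   the same constant is INSN = 0xe3010234, and EXTRACT_MOVW_MOVT_IMM_A
   yields (0x1 << 12) | 0x234 = 0x1234.  */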
894
895 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
896
897 static unsigned int
898 thumb_expand_immediate (unsigned int imm)
899 {
900 unsigned int count = imm >> 7;
901
902 if (count < 8)
903 switch (count / 2)
904 {
905 case 0:
906 return imm & 0xff;
907 case 1:
908 return (imm & 0xff) | ((imm & 0xff) << 16);
909 case 2:
910 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
911 case 3:
912 return (imm & 0xff) | ((imm & 0xff) << 8)
913 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
914 }
915
916 return (0x80 | (imm & 0x7f)) << (32 - count);
917 }
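/* Worked examples: an IMM of 0x0ab has COUNT = 1, so case 0 applies and the
   result is 0x000000ab; 0x1ab has COUNT = 3, so case 1 applies and the
   result is 0x00ab00ab; 0x4ab has COUNT = 9, so 0x80 | 0x2b = 0xab is
   rotated right by 9 bits, giving 0x55800000.  See "ThumbExpandImm" in the
   ARM Architecture Reference Manual for the full definition.  */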
918
919 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
920 the epilogue, 0 otherwise. */
921
922 static int
923 thumb_instruction_restores_sp (unsigned short insn)
924 {
925 return (insn == 0x46bd /* mov sp, r7 */
926 || (insn & 0xff80) == 0xb000 /* add sp, imm */
927 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
928 }
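/* For example, "add sp, #16" encodes as 0xb004 and "pop {r4, pc}" as
   0xbd10; both match the patterns above, so either one ends the prologue
   scan below.  */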
929
930 /* Analyze a Thumb prologue, looking for a recognizable stack frame
931 and frame pointer. Scan until we encounter a store that could
932 clobber the stack frame unexpectedly, or an unknown instruction.
933 Return the last address which is definitely safe to skip for an
934 initial breakpoint. */
935
936 static CORE_ADDR
937 thumb_analyze_prologue (struct gdbarch *gdbarch,
938 CORE_ADDR start, CORE_ADDR limit,
939 struct arm_prologue_cache *cache)
940 {
941 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
942 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
943 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
944 int i;
945 pv_t regs[16];
946 CORE_ADDR offset;
947 CORE_ADDR unrecognized_pc = 0;
948
949 for (i = 0; i < 16; i++)
950 regs[i] = pv_register (i, 0);
951 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
952
953 while (start < limit)
954 {
955 unsigned short insn;
956 gdb::optional<bool> ra_signed_state;
957
958 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
959
960 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
961 {
962 int regno;
963 int mask;
964
965 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
966 break;
967
968 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
969 whether to save LR (R14). */
970 mask = (insn & 0xff) | ((insn & 0x100) << 6);
971
972 /* Calculate offsets of saved R0-R7 and LR. */
973 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
974 if (mask & (1 << regno))
975 {
976 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
977 -4);
978 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
979 }
980 }
981 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
982 {
983 offset = (insn & 0x7f) << 2; /* get scaled offset */
984 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
985 -offset);
986 }
987 else if (thumb_instruction_restores_sp (insn))
988 {
989 /* Don't scan past the epilogue. */
990 break;
991 }
992 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
993 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
994 (insn & 0xff) << 2);
995 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
996 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
997 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
998 bits (insn, 6, 8));
999 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1000 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1001 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1002 bits (insn, 0, 7));
1003 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1004 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1005 && pv_is_constant (regs[bits (insn, 3, 5)]))
1006 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1007 regs[bits (insn, 6, 8)]);
1008 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1009 && pv_is_constant (regs[bits (insn, 3, 6)]))
1010 {
1011 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1012 int rm = bits (insn, 3, 6);
1013 regs[rd] = pv_add (regs[rd], regs[rm]);
1014 }
1015 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1016 {
1017 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1018 int src_reg = (insn & 0x78) >> 3;
1019 regs[dst_reg] = regs[src_reg];
1020 }
1021 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1022 {
1023 /* Handle stores to the stack. Normally pushes are used,
1024 but with GCC -mtpcs-frame, there may be other stores
1025 in the prologue to create the frame. */
1026 int regno = (insn >> 8) & 0x7;
1027 pv_t addr;
1028
1029 offset = (insn & 0xff) << 2;
1030 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1031
1032 if (stack.store_would_trash (addr))
1033 break;
1034
1035 stack.store (addr, 4, regs[regno]);
1036 }
1037 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1038 {
1039 int rd = bits (insn, 0, 2);
1040 int rn = bits (insn, 3, 5);
1041 pv_t addr;
1042
1043 offset = bits (insn, 6, 10) << 2;
1044 addr = pv_add_constant (regs[rn], offset);
1045
1046 if (stack.store_would_trash (addr))
1047 break;
1048
1049 stack.store (addr, 4, regs[rd]);
1050 }
1051 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1052 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1053 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1054 /* Ignore stores of argument registers to the stack. */
1055 ;
1056 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1057 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1058 /* Ignore block loads from the stack, potentially copying
1059 parameters from memory. */
1060 ;
1061 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1062 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1063 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1064 /* Similarly ignore single loads from the stack. */
1065 ;
1066 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1067 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1068 /* Skip register copies, i.e. saves to another register
1069 instead of the stack. */
1070 ;
1071 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1072 /* Recognize constant loads; even with small stacks these are necessary
1073 on Thumb. */
1074 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1075 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1076 {
1077 /* Constant pool loads, for the same reason. */
1078 unsigned int constant;
1079 CORE_ADDR loc;
1080
1081 loc = start + 4 + bits (insn, 0, 7) * 4;
1082 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1083 regs[bits (insn, 8, 10)] = pv_constant (constant);
1084 }
1085 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1086 {
1087 unsigned short inst2;
1088
1089 inst2 = read_code_unsigned_integer (start + 2, 2,
1090 byte_order_for_code);
1091 uint32_t whole_insn = (insn << 16) | inst2;
1092
1093 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1094 {
1095 /* BL, BLX. Allow some special function calls when
1096 skipping the prologue; GCC generates these before
1097 storing arguments to the stack. */
1098 CORE_ADDR nextpc;
1099 int j1, j2, imm1, imm2;
1100
1101 imm1 = sbits (insn, 0, 10);
1102 imm2 = bits (inst2, 0, 10);
1103 j1 = bit (inst2, 13);
1104 j2 = bit (inst2, 11);
1105
1106 offset = ((imm1 << 12) + (imm2 << 1));
1107 offset ^= ((!j2) << 22) | ((!j1) << 23);
1108
1109 nextpc = start + 4 + offset;
1110 /* For BLX make sure to clear the low bits. */
1111 if (bit (inst2, 12) == 0)
1112 nextpc = nextpc & 0xfffffffc;
1113
1114 if (!skip_prologue_function (gdbarch, nextpc,
1115 bit (inst2, 12) != 0))
1116 break;
1117 }
1118
1119 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1120 { registers } */
1121 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1122 {
1123 pv_t addr = regs[bits (insn, 0, 3)];
1124 int regno;
1125
1126 if (stack.store_would_trash (addr))
1127 break;
1128
1129 /* Calculate offsets of saved registers. */
1130 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1131 if (inst2 & (1 << regno))
1132 {
1133 addr = pv_add_constant (addr, -4);
1134 stack.store (addr, 4, regs[regno]);
1135 }
1136
1137 if (insn & 0x0020)
1138 regs[bits (insn, 0, 3)] = addr;
1139 }
1140
1141 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1142 else if ((insn & 0xff20) == 0xed20
1143 && (inst2 & 0x0f00) == 0x0b00
1144 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1145 {
1146 /* Address SP points to. */
1147 pv_t addr = regs[bits (insn, 0, 3)];
1148
1149 /* Number of registers saved. */
1150 unsigned int number = bits (inst2, 0, 7) >> 1;
1151
1152 /* First register to save. */
1153 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1154
1155 if (stack.store_would_trash (addr))
1156 break;
1157
1158 /* Calculate offsets of saved registers. */
1159 for (; number > 0; number--)
1160 {
1161 addr = pv_add_constant (addr, -8);
1162 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1163 + vd + number, 0));
1164 }
1165
1166 /* Writeback SP to account for the saved registers. */
1167 regs[bits (insn, 0, 3)] = addr;
1168 }
1169
1170 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1171 [Rn, #+/-imm]{!} */
1172 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1173 {
1174 int regno1 = bits (inst2, 12, 15);
1175 int regno2 = bits (inst2, 8, 11);
1176 pv_t addr = regs[bits (insn, 0, 3)];
1177
1178 offset = inst2 & 0xff;
1179 if (insn & 0x0080)
1180 addr = pv_add_constant (addr, offset);
1181 else
1182 addr = pv_add_constant (addr, -offset);
1183
1184 if (stack.store_would_trash (addr))
1185 break;
1186
1187 stack.store (addr, 4, regs[regno1]);
1188 stack.store (pv_add_constant (addr, 4),
1189 4, regs[regno2]);
1190
1191 if (insn & 0x0020)
1192 regs[bits (insn, 0, 3)] = addr;
1193 }
1194
1195 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1196 && (inst2 & 0x0c00) == 0x0c00
1197 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1198 {
1199 int regno = bits (inst2, 12, 15);
1200 pv_t addr = regs[bits (insn, 0, 3)];
1201
1202 offset = inst2 & 0xff;
1203 if (inst2 & 0x0200)
1204 addr = pv_add_constant (addr, offset);
1205 else
1206 addr = pv_add_constant (addr, -offset);
1207
1208 if (stack.store_would_trash (addr))
1209 break;
1210
1211 stack.store (addr, 4, regs[regno]);
1212
1213 if (inst2 & 0x0100)
1214 regs[bits (insn, 0, 3)] = addr;
1215 }
1216
1217 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1218 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1219 {
1220 int regno = bits (inst2, 12, 15);
1221 pv_t addr;
1222
1223 offset = inst2 & 0xfff;
1224 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1225
1226 if (stack.store_would_trash (addr))
1227 break;
1228
1229 stack.store (addr, 4, regs[regno]);
1230 }
1231
1232 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1233 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1234 /* Ignore stores of argument registers to the stack. */
1235 ;
1236
1237 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1238 && (inst2 & 0x0d00) == 0x0c00
1239 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1240 /* Ignore stores of argument registers to the stack. */
1241 ;
1242
1243 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1244 { registers } */
1245 && (inst2 & 0x8000) == 0x0000
1246 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1247 /* Ignore block loads from the stack, potentially copying
1248 parameters from memory. */
1249 ;
1250
1251 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1252 [Rn, #+/-imm] */
1253 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1254 /* Similarly ignore dual loads from the stack. */
1255 ;
1256
1257 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1258 && (inst2 & 0x0d00) == 0x0c00
1259 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1260 /* Similarly ignore single loads from the stack. */
1261 ;
1262
1263 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1264 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1265 /* Similarly ignore single loads from the stack. */
1266 ;
1267
1268 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1269 && (inst2 & 0x8000) == 0x0000)
1270 {
1271 unsigned int imm = ((bits (insn, 10, 10) << 11)
1272 | (bits (inst2, 12, 14) << 8)
1273 | bits (inst2, 0, 7));
1274
1275 regs[bits (inst2, 8, 11)]
1276 = pv_add_constant (regs[bits (insn, 0, 3)],
1277 thumb_expand_immediate (imm));
1278 }
1279
1280 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1281 && (inst2 & 0x8000) == 0x0000)
1282 {
1283 unsigned int imm = ((bits (insn, 10, 10) << 11)
1284 | (bits (inst2, 12, 14) << 8)
1285 | bits (inst2, 0, 7));
1286
1287 regs[bits (inst2, 8, 11)]
1288 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1289 }
1290
1291 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1292 && (inst2 & 0x8000) == 0x0000)
1293 {
1294 unsigned int imm = ((bits (insn, 10, 10) << 11)
1295 | (bits (inst2, 12, 14) << 8)
1296 | bits (inst2, 0, 7));
1297
1298 regs[bits (inst2, 8, 11)]
1299 = pv_add_constant (regs[bits (insn, 0, 3)],
1300 - (CORE_ADDR) thumb_expand_immediate (imm));
1301 }
1302
1303 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1304 && (inst2 & 0x8000) == 0x0000)
1305 {
1306 unsigned int imm = ((bits (insn, 10, 10) << 11)
1307 | (bits (inst2, 12, 14) << 8)
1308 | bits (inst2, 0, 7));
1309
1310 regs[bits (inst2, 8, 11)]
1311 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1312 }
1313
1314 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1315 {
1316 unsigned int imm = ((bits (insn, 10, 10) << 11)
1317 | (bits (inst2, 12, 14) << 8)
1318 | bits (inst2, 0, 7));
1319
1320 regs[bits (inst2, 8, 11)]
1321 = pv_constant (thumb_expand_immediate (imm));
1322 }
1323
1324 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1325 {
1326 unsigned int imm
1327 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1328
1329 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1330 }
1331
1332 else if (insn == 0xea5f /* mov.w Rd,Rm */
1333 && (inst2 & 0xf0f0) == 0)
1334 {
1335 int dst_reg = (inst2 & 0x0f00) >> 8;
1336 int src_reg = inst2 & 0xf;
1337 regs[dst_reg] = regs[src_reg];
1338 }
1339
1340 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1341 {
1342 /* Constant pool loads. */
1343 unsigned int constant;
1344 CORE_ADDR loc;
1345
1346 offset = bits (inst2, 0, 11);
1347 if (insn & 0x0080)
1348 loc = start + 4 + offset;
1349 else
1350 loc = start + 4 - offset;
1351
1352 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1353 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1354 }
1355
1356 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1357 {
1358 /* Constant pool loads. */
1359 unsigned int constant;
1360 CORE_ADDR loc;
1361
1362 offset = bits (inst2, 0, 7) << 2;
1363 if (insn & 0x0080)
1364 loc = start + 4 + offset;
1365 else
1366 loc = start + 4 - offset;
1367
1368 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1369 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1370
1371 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1372 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1373 }
1374 /* Start of ARMv8.1-m PACBTI extension instructions. */
1375 else if (IS_PAC (whole_insn))
1376 {
1377 /* LR and SP are input registers. PAC is in R12. LR is
1378 signed from this point onwards. NOP space. */
1379 ra_signed_state = true;
1380 }
1381 else if (IS_PACBTI (whole_insn))
1382 {
1383 /* LR and SP are input registers. PAC is in R12 and PC is a
1384 valid BTI landing pad. LR is signed from this point onwards.
1385 NOP space. */
1386 ra_signed_state = true;
1387 }
1388 else if (IS_BTI (whole_insn))
1389 {
1390 /* Valid BTI landing pad. NOP space. */
1391 }
1392 else if (IS_PACG (whole_insn))
1393 {
1394 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1395 this point onwards. */
1396 ra_signed_state = true;
1397 }
1398 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1399 {
1400 /* These instructions appear close to the epilogue, when signed
1401 pointers are getting authenticated. */
1402 ra_signed_state = false;
1403 }
1404 /* End of ARMv8.1-m PACBTI extension instructions. */
1405 else if (thumb2_instruction_changes_pc (insn, inst2))
1406 {
1407 /* Don't scan past anything that might change control flow. */
1408 break;
1409 }
1410 else
1411 {
1412 /* The optimizer might shove anything into the prologue,
1413 so we just skip what we don't recognize. */
1414 unrecognized_pc = start;
1415 }
1416
1417 /* Make sure we are dealing with a target that supports ARMv8.1-m
1418 PACBTI. */
1419 if (cache != nullptr && tdep->have_pacbti
1420 && ra_signed_state.has_value ())
1421 {
1422 arm_debug_printf ("Found pacbti instruction at %s",
1423 paddress (gdbarch, start));
1424 arm_debug_printf ("RA is %s",
1425 *ra_signed_state ? "signed" : "not signed");
1426 cache->ra_signed_state = ra_signed_state;
1427 }
1428
1429 start += 2;
1430 }
1431 else if (thumb_instruction_changes_pc (insn))
1432 {
1433 /* Don't scan past anything that might change control flow. */
1434 break;
1435 }
1436 else
1437 {
1438 /* The optimizer might shove anything into the prologue,
1439 so we just skip what we don't recognize. */
1440 unrecognized_pc = start;
1441 }
1442
1443 start += 2;
1444 }
1445
1446 arm_debug_printf ("Prologue scan stopped at %s",
1447 paddress (gdbarch, start));
1448
1449 if (unrecognized_pc == 0)
1450 unrecognized_pc = start;
1451
1452 if (cache == NULL)
1453 return unrecognized_pc;
1454
1455 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1456 {
1457 /* Frame pointer is fp. Frame size is constant. */
1458 cache->framereg = ARM_FP_REGNUM;
1459 cache->framesize = -regs[ARM_FP_REGNUM].k;
1460 }
1461 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1462 {
1463 /* Frame pointer is r7. Frame size is constant. */
1464 cache->framereg = THUMB_FP_REGNUM;
1465 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1466 }
1467 else
1468 {
1469 /* Try the stack pointer... this is a bit desperate. */
1470 cache->framereg = ARM_SP_REGNUM;
1471 cache->framesize = -regs[ARM_SP_REGNUM].k;
1472 }
1473
1474 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1475 if (stack.find_reg (gdbarch, i, &offset))
1476 {
1477 cache->saved_regs[i].set_addr (offset);
1478 if (i == ARM_SP_REGNUM)
1479 arm_cache_set_active_sp_value(cache, tdep, offset);
1480 }
1481
1482 return unrecognized_pc;
1483 }
1484
1485
1486 /* Try to analyze the instructions starting from PC, which load the symbol
1487 __stack_chk_guard. Return the address of the instruction after this load,
1488 set the destination register number in *DESTREG, and set the size of the
1489 loading instructions in *OFFSET. Return 0 if the instructions are not
1490 recognized. */
1491
1492 static CORE_ADDR
1493 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1494 unsigned int *destreg, int *offset)
1495 {
1496 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1497 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1498 unsigned int low, high, address;
1499
1500 address = 0;
1501 if (is_thumb)
1502 {
1503 unsigned short insn1
1504 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1505
1506 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1507 {
1508 *destreg = bits (insn1, 8, 10);
1509 *offset = 2;
1510 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1511 address = read_memory_unsigned_integer (address, 4,
1512 byte_order_for_code);
1513 }
1514 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1515 {
1516 unsigned short insn2
1517 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1518
1519 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1520
1521 insn1
1522 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1523 insn2
1524 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1525
1526 /* movt Rd, #const */
1527 if ((insn1 & 0xfbc0) == 0xf2c0)
1528 {
1529 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1530 *destreg = bits (insn2, 8, 11);
1531 *offset = 8;
1532 address = (high << 16 | low);
1533 }
1534 }
1535 }
1536 else
1537 {
1538 unsigned int insn
1539 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1540
1541 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1542 {
1543 address = bits (insn, 0, 11) + pc + 8;
1544 address = read_memory_unsigned_integer (address, 4,
1545 byte_order_for_code);
1546
1547 *destreg = bits (insn, 12, 15);
1548 *offset = 4;
1549 }
1550 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1551 {
1552 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1553
1554 insn
1555 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1556
1557 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1558 {
1559 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1560 *destreg = bits (insn, 12, 15);
1561 *offset = 8;
1562 address = (high << 16 | low);
1563 }
1564 }
1565 }
1566
1567 return address;
1568 }
1569
1570 /* Try to skip the sequence of instructions used for the stack protector.
1571 If PC points to the first instruction of this sequence, return the address
1572 of the first instruction after the sequence; otherwise, return the original PC.
1573 
1574 On ARM, this sequence of instructions is composed of three main steps:
1575 Step 1: load symbol __stack_chk_guard,
1576 Step 2: load from address of __stack_chk_guard,
1577 Step 3: store it to somewhere else.
1578
1579 Usually, instructions on step 2 and step 3 are the same on various ARM
1580 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1581 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1582 instructions in step 1 vary from different ARM architectures. On ARMv7,
1583 they are,
1584
1585 movw Rn, #:lower16:__stack_chk_guard
1586 movt Rn, #:upper16:__stack_chk_guard
1587
1588 On ARMv5t, it is,
1589
1590 ldr Rn, .Label
1591 ....
1592 .Label:
1593 .word __stack_chk_guard
1594
1595 Since ldr/str are very common instructions, we can't use them alone as a
1596 'fingerprint' or 'signature' of the stack protector sequence. Instead we use
1597 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1598 not stripped, as the 'fingerprint' of a stack protector code sequence. */
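/* For illustration only, a typical Thumb-2 sequence emitted by GCC with
   -fstack-protector might look like:

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3]
	str	r3, [r7, #12]

   The registers used and the final store offset vary with the compiler and
   optimization level.  */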
1599
1600 static CORE_ADDR
1601 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1602 {
1603 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1604 unsigned int basereg;
1605 struct bound_minimal_symbol stack_chk_guard;
1606 int offset;
1607 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1608 CORE_ADDR addr;
1609
1610 /* Try to parse the instructions in Step 1. */
1611 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1612 &basereg, &offset);
1613 if (!addr)
1614 return pc;
1615
1616 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1617 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1618 Otherwise, this sequence cannot be for the stack protector. */
1619 if (stack_chk_guard.minsym == NULL
1620 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1621 return pc;
1622
1623 if (is_thumb)
1624 {
1625 unsigned int destreg;
1626 unsigned short insn
1627 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1628
1629 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1630 if ((insn & 0xf800) != 0x6800)
1631 return pc;
1632 if (bits (insn, 3, 5) != basereg)
1633 return pc;
1634 destreg = bits (insn, 0, 2);
1635
1636 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1637 byte_order_for_code);
1638 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1639 if ((insn & 0xf800) != 0x6000)
1640 return pc;
1641 if (destreg != bits (insn, 0, 2))
1642 return pc;
1643 }
1644 else
1645 {
1646 unsigned int destreg;
1647 unsigned int insn
1648 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1649
1650 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1651 if ((insn & 0x0e500000) != 0x04100000)
1652 return pc;
1653 if (bits (insn, 16, 19) != basereg)
1654 return pc;
1655 destreg = bits (insn, 12, 15);
1656 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1657 insn = read_code_unsigned_integer (pc + offset + 4,
1658 4, byte_order_for_code);
1659 if ((insn & 0x0e500000) != 0x04000000)
1660 return pc;
1661 if (bits (insn, 12, 15) != destreg)
1662 return pc;
1663 }
1664 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1665 and 8 bytes on ARM. */
1666 if (is_thumb)
1667 return pc + offset + 4;
1668 else
1669 return pc + offset + 8;
1670 }
1671
1672 /* Advance the PC across any function entry prologue instructions to
1673 reach some "real" code.
1674
1675 The APCS (ARM Procedure Call Standard) defines the following
1676 prologue:
1677
1678 mov ip, sp
1679 [stmfd sp!, {a1,a2,a3,a4}]
1680 stmfd sp!, {...,fp,ip,lr,pc}
1681 [stfe f7, [sp, #-12]!]
1682 [stfe f6, [sp, #-12]!]
1683 [stfe f5, [sp, #-12]!]
1684 [stfe f4, [sp, #-12]!]
1685 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1686
1687 static CORE_ADDR
1688 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1689 {
1690 CORE_ADDR func_addr, limit_pc;
1691
1692 /* See if we can determine the end of the prologue via the symbol table.
1693 If so, then return either PC, or the PC after the prologue, whichever
1694 is greater. */
1695 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1696 {
1697 CORE_ADDR post_prologue_pc
1698 = skip_prologue_using_sal (gdbarch, func_addr);
1699 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1700
1701 if (post_prologue_pc)
1702 post_prologue_pc
1703 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1704
1705
1706 /* GCC always emits a line note before the prologue and another
1707 one after, even if the two are at the same address or on the
1708 same line. Take advantage of this so that we do not need to
1709 know every instruction that might appear in the prologue. We
1710 will have producer information for most binaries; if it is
1711 missing (e.g. for -gstabs), assume the GNU tools. */
1712 if (post_prologue_pc
1713 && (cust == NULL
1714 || cust->producer () == NULL
1715 || startswith (cust->producer (), "GNU ")
1716 || producer_is_llvm (cust->producer ())))
1717 return post_prologue_pc;
1718
1719 if (post_prologue_pc != 0)
1720 {
1721 CORE_ADDR analyzed_limit;
1722
1723 /* For non-GCC compilers, make sure the entire line is an
1724 acceptable prologue; GDB will round this function's
1725 return value up to the end of the following line so we
1726 can not skip just part of a line (and we do not want to).
1727
1728 RealView does not treat the prologue specially, but does
1729 associate prologue code with the opening brace; so this
1730 lets us skip the first line if we think it is the opening
1731 brace. */
1732 if (arm_pc_is_thumb (gdbarch, func_addr))
1733 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1734 post_prologue_pc, NULL);
1735 else
1736 analyzed_limit
1737 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1738 NULL, target_arm_instruction_reader ());
1739
1740 if (analyzed_limit != post_prologue_pc)
1741 return func_addr;
1742
1743 return post_prologue_pc;
1744 }
1745 }
1746
1747 /* Can't determine prologue from the symbol table, need to examine
1748 instructions. */
1749
1750 /* Find an upper limit on the function prologue using the debug
1751 information. If the debug information could not be used to provide
1752 that bound, then use an arbitrary large number as the upper bound. */
1753 /* Like arm_scan_prologue, stop no later than pc + 64. */
1754 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1755 if (limit_pc == 0)
1756 limit_pc = pc + 64; /* Magic. */
1757
1758
1759 /* Check if this is Thumb code. */
1760 if (arm_pc_is_thumb (gdbarch, pc))
1761 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1762 else
1763 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1764 target_arm_instruction_reader ());
1765 }
1766
1767 /* *INDENT-OFF* */
1768 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1769 This function decodes a Thumb function prologue to determine:
1770 1) the size of the stack frame
1771 2) which registers are saved on it
1772 3) the offsets of saved regs
1773 4) the offset from the stack pointer to the frame pointer
1774
1775 A typical Thumb function prologue would create this stack frame
1776 (offsets relative to FP)
1777 old SP -> 24 stack parameters
1778 20 LR
1779 16 R7
1780 R7 -> 0 local variables (16 bytes)
1781 SP -> -12 additional stack space (12 bytes)
1782 The frame size would thus be 36 bytes, and the frame offset would be
1783 12 bytes. The frame register is R7.
1784
1785 The comments for thumb_analyze_prologue() describe the algorithm we use
1786 to detect the end of the prologue. */
1787 /* *INDENT-ON* */
1788
1789 static void
1790 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1791 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1792 {
1793 CORE_ADDR prologue_start;
1794 CORE_ADDR prologue_end;
1795
1796 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1797 &prologue_end))
1798 {
1799 /* See comment in arm_scan_prologue for an explanation of
1800 	 this heuristic. */
1801 if (prologue_end > prologue_start + 64)
1802 {
1803 prologue_end = prologue_start + 64;
1804 }
1805 }
1806 else
1807 /* We're in the boondocks: we have no idea where the start of the
1808 function is. */
1809 return;
1810
1811 prologue_end = std::min (prologue_end, prev_pc);
1812
1813 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1814 }
1815
1816 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1817 otherwise. */
1818
1819 static int
1820 arm_instruction_restores_sp (unsigned int insn)
1821 {
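  /* Only conditionally-executed encodings are examined; a condition field
     of 0b1111 (NV) selects the unconditional instruction space, which has
     unrelated semantics.  */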
1822 if (bits (insn, 28, 31) != INST_NV)
1823 {
1824 if ((insn & 0x0df0f000) == 0x0080d000
1825 /* ADD SP (register or immediate). */
1826 || (insn & 0x0df0f000) == 0x0040d000
1827 /* SUB SP (register or immediate). */
1828 || (insn & 0x0ffffff0) == 0x01a0d000
1829 /* MOV SP. */
1830 || (insn & 0x0fff0000) == 0x08bd0000
1831 /* POP (LDMIA). */
1832 || (insn & 0x0fff0000) == 0x049d0000)
1833 /* POP of a single register. */
1834 return 1;
1835 }
1836
1837 return 0;
1838 }
1839
1840 /* Implement immediate value decoding, as described in section A5.2.4
1841 (Modified immediate constants in ARM instructions) of the ARM Architecture
1842 Reference Manual (ARMv7-A and ARMv7-R edition). */
1843
1844 static uint32_t
1845 arm_expand_immediate (uint32_t imm)
1846 {
1847 /* Immediate values are 12 bits long. */
1848 gdb_assert ((imm & 0xfffff000) == 0);
1849
1850 uint32_t unrotated_value = imm & 0xff;
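  /* Bits 8..11 hold the rotation field, which encodes a rotate-right
     amount of twice its value -- hence the shift by 7 rather than 8.
     For example, IMM = 0x4ff is 0xff rotated right by 8, i.e. 0xff000000.  */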
1851 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1852
1853 if (rotate_amount == 0)
1854 return unrotated_value;
1855
1856 return ((unrotated_value >> rotate_amount)
1857 | (unrotated_value << (32 - rotate_amount)));
1858 }
1859
1860 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1861 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1862 fill it in. Return the first address not recognized as a prologue
1863 instruction.
1864
1865 We recognize all the instructions typically found in ARM prologues,
1866 plus harmless instructions which can be skipped (either for analysis
1867 purposes, or a more restrictive set that can be skipped when finding
1868 the end of the prologue). */
1869
1870 static CORE_ADDR
1871 arm_analyze_prologue (struct gdbarch *gdbarch,
1872 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1873 struct arm_prologue_cache *cache,
1874 const arm_instruction_reader &insn_reader)
1875 {
1876 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1877 int regno;
1878 CORE_ADDR offset, current_pc;
1879 pv_t regs[ARM_FPS_REGNUM];
1880 CORE_ADDR unrecognized_pc = 0;
1881 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1882
1883 /* Search the prologue looking for instructions that set up the
1884 frame pointer, adjust the stack pointer, and save registers.
1885
1886 Be careful, however, and if it doesn't look like a prologue,
1887 don't try to scan it. If, for instance, a frameless function
1888 begins with stmfd sp!, then we will tell ourselves there is
1889 a frame, which will confuse stack traceback, as well as "finish"
1890 and other operations that rely on a knowledge of the stack
1891 traceback. */
1892
1893 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1894 regs[regno] = pv_register (regno, 0);
1895 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
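  /* REGS now holds a symbolic (prologue-value) description of each
     register, and STACK records the symbolic stores made through the
     stack pointer; stack.find_reg below recovers the save slots from it.  */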
1896
1897 for (current_pc = prologue_start;
1898 current_pc < prologue_end;
1899 current_pc += 4)
1900 {
1901 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1902
1903 if (insn == 0xe1a0c00d) /* mov ip, sp */
1904 {
1905 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1906 continue;
1907 }
1908 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1909 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1910 {
1911 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1912 int rd = bits (insn, 12, 15);
1913 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1914 continue;
1915 }
1916 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1917 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1918 {
1919 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1920 int rd = bits (insn, 12, 15);
1921 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1922 continue;
1923 }
1924 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1925 [sp, #-4]! */
1926 {
1927 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1928 break;
1929 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1930 stack.store (regs[ARM_SP_REGNUM], 4,
1931 regs[bits (insn, 12, 15)]);
1932 continue;
1933 }
1934 else if ((insn & 0xffff0000) == 0xe92d0000)
1935 /* stmfd sp!, {..., fp, ip, lr, pc}
1936 or
1937 stmfd sp!, {a1, a2, a3, a4} */
1938 {
1939 int mask = insn & 0xffff;
1940
1941 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1942 break;
1943
1944 /* Calculate offsets of saved registers. */
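	  /* STMFD stores the highest-numbered register at the highest
	     address, so walk the mask from PC down to R0 while the
	     stack pointer is decremented.  */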
1945 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1946 if (mask & (1 << regno))
1947 {
1948 regs[ARM_SP_REGNUM]
1949 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1950 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1951 }
1952 }
1953 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1954 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1955 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1956 {
1957 /* No need to add this to saved_regs -- it's just an arg reg. */
1958 continue;
1959 }
1960 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1961 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1962 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1963 {
1964 /* No need to add this to saved_regs -- it's just an arg reg. */
1965 continue;
1966 }
1967 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1968 { registers } */
1969 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1970 {
1971 /* No need to add this to saved_regs -- it's just arg regs. */
1972 continue;
1973 }
1974       else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #n */
1975 {
1976 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1977 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1978 }
1979       else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #n */
1980 {
1981 	  uint32_t imm = arm_expand_immediate (insn & 0xfff);
1982 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1983 }
1984 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1985 [sp, -#c]! */
1986 && tdep->have_fpa_registers)
1987 {
1988 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1989 break;
1990
1991 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1992 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1993 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1994 }
1995 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1996 [sp!] */
1997 && tdep->have_fpa_registers)
1998 {
1999 int n_saved_fp_regs;
2000 unsigned int fp_start_reg, fp_bound_reg;
2001
2002 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2003 break;
2004
2005 if ((insn & 0x800) == 0x800) /* N0 is set */
2006 {
2007 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2008 n_saved_fp_regs = 3;
2009 else
2010 n_saved_fp_regs = 1;
2011 }
2012 else
2013 {
2014 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2015 n_saved_fp_regs = 2;
2016 else
2017 n_saved_fp_regs = 4;
2018 }
2019
2020 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2021 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2022 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2023 {
2024 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2025 stack.store (regs[ARM_SP_REGNUM], 12,
2026 			   regs[fp_start_reg]);
2027 }
2028 }
2029 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2030 {
2031 /* Allow some special function calls when skipping the
2032 prologue; GCC generates these before storing arguments to
2033 the stack. */
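	  /* The branch target is the current PC plus 8 (ARM pipeline
	     offset) plus the sign-extended 24-bit offset scaled by 4,
	     which is what BranchDest computes.  */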
2034 CORE_ADDR dest = BranchDest (current_pc, insn);
2035
2036 if (skip_prologue_function (gdbarch, dest, 0))
2037 continue;
2038 else
2039 break;
2040 }
2041 else if ((insn & 0xf0000000) != 0xe0000000)
2042 break; /* Condition not true, exit early. */
2043 else if (arm_instruction_changes_pc (insn))
2044 /* Don't scan past anything that might change control flow. */
2045 break;
2046 else if (arm_instruction_restores_sp (insn))
2047 {
2048 /* Don't scan past the epilogue. */
2049 break;
2050 }
2051 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2052 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2053 /* Ignore block loads from the stack, potentially copying
2054 parameters from memory. */
2055 continue;
2056 else if ((insn & 0xfc500000) == 0xe4100000
2057 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2058 /* Similarly ignore single loads from the stack. */
2059 continue;
2060 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2061 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2062 register instead of the stack. */
2063 continue;
2064 else
2065 {
2066 	  /* The optimizer might shove anything into the prologue.  If we
2067 	     are building up a cache (cache != NULL) from scanning the
2068 	     prologue, we just skip what we don't recognize and scan further
2069 	     to make the cache as complete as possible.  However, if we are
2070 	     merely skipping the prologue, we stop immediately on the first
2071 	     unrecognized instruction. */
2072 unrecognized_pc = current_pc;
2073 if (cache != NULL)
2074 continue;
2075 else
2076 break;
2077 }
2078 }
2079
2080 if (unrecognized_pc == 0)
2081 unrecognized_pc = current_pc;
2082
2083 if (cache)
2084 {
2085 int framereg, framesize;
2086
2087 /* The frame size is just the distance from the frame register
2088 to the original stack pointer. */
2089 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2090 {
2091 /* Frame pointer is fp. */
2092 framereg = ARM_FP_REGNUM;
2093 framesize = -regs[ARM_FP_REGNUM].k;
2094 }
2095 else
2096 {
2097 /* Try the stack pointer... this is a bit desperate. */
2098 framereg = ARM_SP_REGNUM;
2099 framesize = -regs[ARM_SP_REGNUM].k;
2100 }
2101
2102 cache->framereg = framereg;
2103 cache->framesize = framesize;
2104
2105 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2106 if (stack.find_reg (gdbarch, regno, &offset))
2107 {
2108 cache->saved_regs[regno].set_addr (offset);
2109 if (regno == ARM_SP_REGNUM)
2110 	      arm_cache_set_active_sp_value (cache, tdep, offset);
2111 }
2112 }
2113
2114 arm_debug_printf ("Prologue scan stopped at %s",
2115 paddress (gdbarch, unrecognized_pc));
2116
2117 return unrecognized_pc;
2118 }
2119
2120 static void
2121 arm_scan_prologue (struct frame_info *this_frame,
2122 struct arm_prologue_cache *cache)
2123 {
2124 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2125 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2126 CORE_ADDR prologue_start, prologue_end;
2127 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2128 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2129 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2130
2131 /* Assume there is no frame until proven otherwise. */
2132 cache->framereg = ARM_SP_REGNUM;
2133 cache->framesize = 0;
2134
2135 /* Check for Thumb prologue. */
2136 if (arm_frame_is_thumb (this_frame))
2137 {
2138 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2139 return;
2140 }
2141
2142 /* Find the function prologue. If we can't find the function in
2143 the symbol table, peek in the stack frame to find the PC. */
2144 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2145 &prologue_end))
2146 {
2147 /* One way to find the end of the prologue (which works well
2148 for unoptimized code) is to do the following:
2149
2150 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2151
2152 if (sal.line == 0)
2153 prologue_end = prev_pc;
2154 else if (sal.end < prologue_end)
2155 prologue_end = sal.end;
2156
2157 This mechanism is very accurate so long as the optimizer
2158 doesn't move any instructions from the function body into the
2159 prologue. If this happens, sal.end will be the last
2160 instruction in the first hunk of prologue code just before
2161 the first instruction that the scheduler has moved from
2162 the body to the prologue.
2163
2164 In order to make sure that we scan all of the prologue
2165 instructions, we use a slightly less accurate mechanism which
2166 may scan more than necessary. To help compensate for this
2167 lack of accuracy, the prologue scanning loop below contains
2168 	 several clauses which will cause the loop to terminate early if
2169 an implausible prologue instruction is encountered.
2170
2171 The expression
2172
2173 prologue_start + 64
2174
2175 is a suitable endpoint since it accounts for the largest
2176 possible prologue plus up to five instructions inserted by
2177 the scheduler. */
2178
2179 if (prologue_end > prologue_start + 64)
2180 {
2181 prologue_end = prologue_start + 64; /* See above. */
2182 }
2183 }
2184 else
2185 {
2186 /* We have no symbol information. Our only option is to assume this
2187 function has a standard stack frame and the normal frame register.
2188 Then, we can find the value of our frame pointer on entrance to
2189 the callee (or at the present moment if this is the innermost frame).
2190 The value stored there should be the address of the stmfd + 8. */
2191 CORE_ADDR frame_loc;
2192 ULONGEST return_value;
2193
2194 /* AAPCS does not use a frame register, so we can abort here. */
2195 if (tdep->arm_abi == ARM_ABI_AAPCS)
2196 return;
2197
2198 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2199 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2200 &return_value))
2201 return;
2202 else
2203 {
2204 prologue_start = gdbarch_addr_bits_remove
2205 (gdbarch, return_value) - 8;
2206 prologue_end = prologue_start + 64; /* See above. */
2207 }
2208 }
2209
2210 if (prev_pc < prologue_end)
2211 prologue_end = prev_pc;
2212
2213 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2214 target_arm_instruction_reader ());
2215 }
2216
2217 static struct arm_prologue_cache *
2218 arm_make_prologue_cache (struct frame_info *this_frame)
2219 {
2220 int reg;
2221 struct arm_prologue_cache *cache;
2222 CORE_ADDR unwound_fp, prev_sp;
2223
2224 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2225 arm_cache_init (cache, this_frame);
2226
2227 arm_scan_prologue (this_frame, cache);
2228
2229 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2230 if (unwound_fp == 0)
2231 return cache;
2232
2233   arm_gdbarch_tdep *tdep
2234     = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2235
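  /* The frame register holds the entry SP minus FRAMESIZE, so the
     caller's SP at the time of the call is its current value plus
     FRAMESIZE.  */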
2236 prev_sp = unwound_fp + cache->framesize;
2237 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2238
2239 /* Calculate actual addresses of saved registers using offsets
2240 determined by arm_scan_prologue. */
2241 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2242 if (cache->saved_regs[reg].is_addr ())
2243       cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2244 				       + prev_sp);
2245
2246 return cache;
2247 }
2248
2249 /* Implementation of the stop_reason hook for arm_prologue frames. */
2250
2251 static enum unwind_stop_reason
2252 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2253 void **this_cache)
2254 {
2255 struct arm_prologue_cache *cache;
2256 CORE_ADDR pc;
2257
2258 if (*this_cache == NULL)
2259 *this_cache = arm_make_prologue_cache (this_frame);
2260 cache = (struct arm_prologue_cache *) *this_cache;
2261
2262 /* This is meant to halt the backtrace at "_start". */
2263 pc = get_frame_pc (this_frame);
2264 gdbarch *arch = get_frame_arch (this_frame);
2265 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (arch);
2266 if (pc <= tdep->lowest_pc)
2267 return UNWIND_OUTERMOST;
2268
2269 /* If we've hit a wall, stop. */
2270 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2271 return UNWIND_OUTERMOST;
2272
2273 return UNWIND_NO_REASON;
2274 }
2275
2276 /* Our frame ID for a normal frame is the current function's starting PC
2277 and the caller's SP when we were called. */
2278
2279 static void
2280 arm_prologue_this_id (struct frame_info *this_frame,
2281 void **this_cache,
2282 struct frame_id *this_id)
2283 {
2284 struct arm_prologue_cache *cache;
2285 struct frame_id id;
2286 CORE_ADDR pc, func;
2287
2288 if (*this_cache == NULL)
2289 *this_cache = arm_make_prologue_cache (this_frame);
2290 cache = (struct arm_prologue_cache *) *this_cache;
2291
2292 arm_gdbarch_tdep *tdep
2293 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2294
2295 /* Use function start address as part of the frame ID. If we cannot
2296 identify the start address (due to missing symbol information),
2297 fall back to just using the current PC. */
2298 pc = get_frame_pc (this_frame);
2299 func = get_frame_func (this_frame);
2300 if (!func)
2301 func = pc;
2302
2303 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2304 *this_id = id;
2305 }
2306
2307 static struct value *
2308 arm_prologue_prev_register (struct frame_info *this_frame,
2309 void **this_cache,
2310 int prev_regnum)
2311 {
2312 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2313 struct arm_prologue_cache *cache;
2314 CORE_ADDR sp_value;
2315
2316 if (*this_cache == NULL)
2317 *this_cache = arm_make_prologue_cache (this_frame);
2318 cache = (struct arm_prologue_cache *) *this_cache;
2319
2320 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2321
2322 /* If this frame has signed the return address, mark it as so. */
2323 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2324 && *cache->ra_signed_state)
2325 set_frame_previous_pc_masked (this_frame);
2326
2327 /* If we are asked to unwind the PC, then we need to return the LR
2328 instead. The prologue may save PC, but it will point into this
2329 frame's prologue, not the next frame's resume location. Also
2330 strip the saved T bit. A valid LR may have the low bit set, but
2331 a valid PC never does. */
2332 if (prev_regnum == ARM_PC_REGNUM)
2333 {
2334 CORE_ADDR lr;
2335
2336 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2337 return frame_unwind_got_constant (this_frame, prev_regnum,
2338 arm_addr_bits_remove (gdbarch, lr));
2339 }
2340
2341 /* SP is generally not saved to the stack, but this frame is
2342 identified by the next frame's stack pointer at the time of the call.
2343 The value was already reconstructed into PREV_SP. */
2344 if (prev_regnum == ARM_SP_REGNUM)
2345 return frame_unwind_got_constant (this_frame, prev_regnum,
2346 arm_cache_get_prev_sp_value (cache, tdep));
2347
2348   /* The value might be one of the alternative SP registers; if so, use
2349      the value already reconstructed. */
2350 if (arm_cache_is_sp_register (cache, tdep, prev_regnum))
2351 {
2352 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2353 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2354 }
2355
2356 /* The CPSR may have been changed by the call instruction and by the
2357 called function. The only bit we can reconstruct is the T bit,
2358 by checking the low bit of LR as of the call. This is a reliable
2359 indicator of Thumb-ness except for some ARM v4T pre-interworking
2360 Thumb code, which could get away with a clear low bit as long as
2361 the called function did not use bx. Guess that all other
2362 bits are unchanged; the condition flags are presumably lost,
2363 but the processor status is likely valid. */
2364 if (prev_regnum == ARM_PS_REGNUM)
2365 {
2366 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2367 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2368
2369 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2370 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2371 }
2372
2373 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2374 prev_regnum);
2375 }
2376
2377 static frame_unwind arm_prologue_unwind = {
2378 "arm prologue",
2379 NORMAL_FRAME,
2380 arm_prologue_unwind_stop_reason,
2381 arm_prologue_this_id,
2382 arm_prologue_prev_register,
2383 NULL,
2384 default_frame_sniffer
2385 };
2386
2387 /* Maintain a list of ARM exception table entries per objfile, similar to the
2388 list of mapping symbols. We only cache entries for standard ARM-defined
2389 personality routines; the cache will contain only the frame unwinding
2390 instructions associated with the entry (not the descriptors). */
2391
2392 struct arm_exidx_entry
2393 {
2394 CORE_ADDR addr;
2395 gdb_byte *entry;
2396
2397 bool operator< (const arm_exidx_entry &other) const
2398 {
2399 return addr < other.addr;
2400 }
2401 };
2402
2403 struct arm_exidx_data
2404 {
2405 std::vector<std::vector<arm_exidx_entry>> section_maps;
2406 };
2407
2408 /* Per-BFD key to store exception handling information. */
2409 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2410
2411 static struct obj_section *
2412 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2413 {
2414 struct obj_section *osect;
2415
2416 ALL_OBJFILE_OSECTIONS (objfile, osect)
2417 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2418 {
2419 bfd_vma start, size;
2420 start = bfd_section_vma (osect->the_bfd_section);
2421 size = bfd_section_size (osect->the_bfd_section);
2422
2423 if (start <= vma && vma < start + size)
2424 return osect;
2425 }
2426
2427 return NULL;
2428 }
2429
2430 /* Parse contents of exception table and exception index sections
2431 of OBJFILE, and fill in the exception table entry cache.
2432
2433 For each entry that refers to a standard ARM-defined personality
2434 routine, extract the frame unwinding instructions (from either
2435 the index or the table section). The unwinding instructions
2436 are normalized by:
2437 - extracting them from the rest of the table data
2438 - converting to host endianness
2439 - appending the implicit 0xb0 ("Finish") code
2440
2441 The extracted and normalized instructions are stored for later
2442 retrieval by the arm_find_exidx_entry routine. */
2443
2444 static void
2445 arm_exidx_new_objfile (struct objfile *objfile)
2446 {
2447 struct arm_exidx_data *data;
2448 asection *exidx, *extab;
2449 bfd_vma exidx_vma = 0, extab_vma = 0;
2450 LONGEST i;
2451
2452 /* If we've already touched this file, do nothing. */
2453 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2454 return;
2455
2456 /* Read contents of exception table and index. */
2457 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2458 gdb::byte_vector exidx_data;
2459 if (exidx)
2460 {
2461 exidx_vma = bfd_section_vma (exidx);
2462 exidx_data.resize (bfd_section_size (exidx));
2463
2464 if (!bfd_get_section_contents (objfile->obfd, exidx,
2465 exidx_data.data (), 0,
2466 exidx_data.size ()))
2467 return;
2468 }
2469
2470 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2471 gdb::byte_vector extab_data;
2472 if (extab)
2473 {
2474 extab_vma = bfd_section_vma (extab);
2475 extab_data.resize (bfd_section_size (extab));
2476
2477 if (!bfd_get_section_contents (objfile->obfd, extab,
2478 extab_data.data (), 0,
2479 extab_data.size ()))
2480 return;
2481 }
2482
2483 /* Allocate exception table data structure. */
2484 data = arm_exidx_data_key.emplace (objfile->obfd);
2485 data->section_maps.resize (objfile->obfd->section_count);
2486
2487 /* Fill in exception table. */
2488 for (i = 0; i < exidx_data.size () / 8; i++)
2489 {
2490 struct arm_exidx_entry new_exidx_entry;
2491 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2492 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2493 exidx_data.data () + i * 8 + 4);
2494 bfd_vma addr = 0, word = 0;
2495 int n_bytes = 0, n_words = 0;
2496 struct obj_section *sec;
2497 gdb_byte *entry = NULL;
2498
2499 /* Extract address of start of function. */
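      /* The first word of each index entry is a prel31 offset: sign-extend
	 it from 31 to 32 bits, then bias it by the address of the entry
	 to obtain the absolute address.  */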
2500 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2501 idx += exidx_vma + i * 8;
2502
2503 /* Find section containing function and compute section offset. */
2504 sec = arm_obj_section_from_vma (objfile, idx);
2505 if (sec == NULL)
2506 continue;
2507 idx -= bfd_section_vma (sec->the_bfd_section);
2508
2509 /* Determine address of exception table entry. */
2510 if (val == 1)
2511 {
2512 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2513 }
2514 else if ((val & 0xff000000) == 0x80000000)
2515 {
2516 /* Exception table entry embedded in .ARM.exidx
2517 -- must be short form. */
2518 word = val;
2519 n_bytes = 3;
2520 }
2521 else if (!(val & 0x80000000))
2522 {
2523 /* Exception table entry in .ARM.extab. */
2524 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2525 addr += exidx_vma + i * 8 + 4;
2526
2527 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2528 {
2529 word = bfd_h_get_32 (objfile->obfd,
2530 extab_data.data () + addr - extab_vma);
2531 addr += 4;
2532
2533 if ((word & 0xff000000) == 0x80000000)
2534 {
2535 /* Short form. */
2536 n_bytes = 3;
2537 }
2538 else if ((word & 0xff000000) == 0x81000000
2539 || (word & 0xff000000) == 0x82000000)
2540 {
2541 /* Long form. */
2542 n_bytes = 2;
2543 n_words = ((word >> 16) & 0xff);
2544 }
2545 else if (!(word & 0x80000000))
2546 {
2547 bfd_vma pers;
2548 struct obj_section *pers_sec;
2549 int gnu_personality = 0;
2550
2551 /* Custom personality routine. */
2552 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2553 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2554
2555 /* Check whether we've got one of the variants of the
2556 GNU personality routines. */
2557 pers_sec = arm_obj_section_from_vma (objfile, pers);
2558 if (pers_sec)
2559 {
2560 static const char *personality[] =
2561 {
2562 "__gcc_personality_v0",
2563 "__gxx_personality_v0",
2564 "__gcj_personality_v0",
2565 "__gnu_objc_personality_v0",
2566 NULL
2567 };
2568
2569 CORE_ADDR pc = pers + pers_sec->offset ();
2570 int k;
2571
2572 for (k = 0; personality[k]; k++)
2573 if (lookup_minimal_symbol_by_pc_name
2574 (pc, personality[k], objfile))
2575 {
2576 gnu_personality = 1;
2577 break;
2578 }
2579 }
2580
2581 /* If so, the next word contains a word count in the high
2582 byte, followed by the same unwind instructions as the
2583 pre-defined forms. */
2584 if (gnu_personality
2585 && addr + 4 <= extab_vma + extab_data.size ())
2586 {
2587 word = bfd_h_get_32 (objfile->obfd,
2588 (extab_data.data ()
2589 + addr - extab_vma));
2590 addr += 4;
2591 n_bytes = 3;
2592 n_words = ((word >> 24) & 0xff);
2593 }
2594 }
2595 }
2596 }
2597
2598 /* Sanity check address. */
2599 if (n_words)
2600 if (addr < extab_vma
2601 || addr + 4 * n_words > extab_vma + extab_data.size ())
2602 n_words = n_bytes = 0;
2603
2604 /* The unwind instructions reside in WORD (only the N_BYTES least
2605 significant bytes are valid), followed by N_WORDS words in the
2606 extab section starting at ADDR. */
2607 if (n_bytes || n_words)
2608 {
2609 gdb_byte *p = entry
2610 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2611 n_bytes + n_words * 4 + 1);
2612
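	  /* Emit the valid bytes of WORD, most significant first; note
	     that N_BYTES has already been decremented by the loop
	     condition when it is used in the shift below.  */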
2613 while (n_bytes--)
2614 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2615
2616 while (n_words--)
2617 {
2618 word = bfd_h_get_32 (objfile->obfd,
2619 extab_data.data () + addr - extab_vma);
2620 addr += 4;
2621
2622 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2623 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2624 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2625 *p++ = (gdb_byte) (word & 0xff);
2626 }
2627
2628 /* Implied "Finish" to terminate the list. */
2629 *p++ = 0xb0;
2630 }
2631
2632 /* Push entry onto vector. They are guaranteed to always
2633 appear in order of increasing addresses. */
2634 new_exidx_entry.addr = idx;
2635 new_exidx_entry.entry = entry;
2636 data->section_maps[sec->the_bfd_section->index].push_back
2637 (new_exidx_entry);
2638 }
2639 }
2640
2641 /* Search for the exception table entry covering MEMADDR. If one is found,
2642 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2643 set *START to the start of the region covered by this entry. */
2644
2645 static gdb_byte *
2646 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2647 {
2648 struct obj_section *sec;
2649
2650 sec = find_pc_section (memaddr);
2651 if (sec != NULL)
2652 {
2653 struct arm_exidx_data *data;
2654 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2655
2656 data = arm_exidx_data_key.get (sec->objfile->obfd);
2657 if (data != NULL)
2658 {
2659 std::vector<arm_exidx_entry> &map
2660 = data->section_maps[sec->the_bfd_section->index];
2661 if (!map.empty ())
2662 {
2663 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2664
2665 /* std::lower_bound finds the earliest ordered insertion
2666 point. If the following symbol starts at this exact
2667 address, we use that; otherwise, the preceding
2668 exception table entry covers this address. */
2669 if (idx < map.end ())
2670 {
2671 if (idx->addr == map_key.addr)
2672 {
2673 if (start)
2674 *start = idx->addr + sec->addr ();
2675 return idx->entry;
2676 }
2677 }
2678
2679 if (idx > map.begin ())
2680 {
2681 idx = idx - 1;
2682 if (start)
2683 *start = idx->addr + sec->addr ();
2684 return idx->entry;
2685 }
2686 }
2687 }
2688 }
2689
2690 return NULL;
2691 }
2692
2693 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2694 instruction list from the ARM exception table entry ENTRY, allocate and
2695 return a prologue cache structure describing how to unwind this frame.
2696
2697 Return NULL if the unwinding instruction list contains a "spare",
2698 "reserved" or "refuse to unwind" instruction as defined in section
2699 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2700 for the ARM Architecture" document. */
2701
2702 static struct arm_prologue_cache *
2703 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2704 {
2705 CORE_ADDR vsp = 0;
2706 int vsp_valid = 0;
2707
2708 struct arm_prologue_cache *cache;
2709 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2710 arm_cache_init (cache, this_frame);
2711
2712 for (;;)
2713 {
2714 gdb_byte insn;
2715
2716       /* Whenever we reload SP, we have to retrieve its actual value in
2717 	 the current frame. */
2718 if (!vsp_valid)
2719 {
2720 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2721 {
2722 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2723 vsp = get_frame_register_unsigned (this_frame, reg);
2724 }
2725 else
2726 {
2727 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2728 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2729 }
2730
2731 vsp_valid = 1;
2732 }
2733
2734 /* Decode next unwind instruction. */
2735 insn = *entry++;
2736
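      /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.
	 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */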
2737 if ((insn & 0xc0) == 0)
2738 {
2739 int offset = insn & 0x3f;
2740 vsp += (offset << 2) + 4;
2741 }
2742 else if ((insn & 0xc0) == 0x40)
2743 {
2744 int offset = insn & 0x3f;
2745 vsp -= (offset << 2) + 4;
2746 }
2747 else if ((insn & 0xf0) == 0x80)
2748 {
2749 int mask = ((insn & 0xf) << 8) | *entry++;
2750 int i;
2751
2752 /* The special case of an all-zero mask identifies
2753 "Refuse to unwind". We return NULL to fall back
2754 to the prologue analyzer. */
2755 if (mask == 0)
2756 return NULL;
2757
2758 /* Pop registers r4..r15 under mask. */
2759 for (i = 0; i < 12; i++)
2760 if (mask & (1 << i))
2761 {
2762 cache->saved_regs[4 + i].set_addr (vsp);
2763 vsp += 4;
2764 }
2765
2766 /* Special-case popping SP -- we need to reload vsp. */
2767 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2768 vsp_valid = 0;
2769 }
2770 else if ((insn & 0xf0) == 0x90)
2771 {
2772 int reg = insn & 0xf;
2773
2774 /* Reserved cases. */
2775 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2776 return NULL;
2777
2778 /* Set SP from another register and mark VSP for reload. */
2779 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2780 vsp_valid = 0;
2781 }
2782 else if ((insn & 0xf0) == 0xa0)
2783 {
2784 int count = insn & 0x7;
2785 int pop_lr = (insn & 0x8) != 0;
2786 int i;
2787
2788 /* Pop r4..r[4+count]. */
2789 for (i = 0; i <= count; i++)
2790 {
2791 cache->saved_regs[4 + i].set_addr (vsp);
2792 vsp += 4;
2793 }
2794
2795 /* If indicated by flag, pop LR as well. */
2796 if (pop_lr)
2797 {
2798 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2799 vsp += 4;
2800 }
2801 }
2802 else if (insn == 0xb0)
2803 {
2804 /* We could only have updated PC by popping into it; if so, it
2805 	     will show up as an address.  Otherwise, copy LR into PC. */
2806 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2807 cache->saved_regs[ARM_PC_REGNUM]
2808 = cache->saved_regs[ARM_LR_REGNUM];
2809
2810 /* We're done. */
2811 break;
2812 }
2813 else if (insn == 0xb1)
2814 {
2815 int mask = *entry++;
2816 int i;
2817
2818 	  /* An all-zero mask or a mask >= 16 is "spare". */
2819 if (mask == 0 || mask >= 16)
2820 return NULL;
2821
2822 /* Pop r0..r3 under mask. */
2823 for (i = 0; i < 4; i++)
2824 if (mask & (1 << i))
2825 {
2826 cache->saved_regs[i].set_addr (vsp);
2827 vsp += 4;
2828 }
2829 }
2830 else if (insn == 0xb2)
2831 {
2832 ULONGEST offset = 0;
2833 unsigned shift = 0;
2834
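	  /* The operand is a ULEB128-encoded value; opcode 0xb2 means
	     vsp += 0x204 + (uleb128 << 2).  */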
2835 do
2836 {
2837 offset |= (*entry & 0x7f) << shift;
2838 shift += 7;
2839 }
2840 while (*entry++ & 0x80);
2841
2842 vsp += 0x204 + (offset << 2);
2843 }
2844 else if (insn == 0xb3)
2845 {
2846 int start = *entry >> 4;
2847 int count = (*entry++) & 0xf;
2848 int i;
2849
2850 /* Only registers D0..D15 are valid here. */
2851 if (start + count >= 16)
2852 return NULL;
2853
2854 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2855 for (i = 0; i <= count; i++)
2856 {
2857 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2858 vsp += 8;
2859 }
2860
2861 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2862 vsp += 4;
2863 }
2864 else if ((insn & 0xf8) == 0xb8)
2865 {
2866 int count = insn & 0x7;
2867 int i;
2868
2869 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2870 for (i = 0; i <= count; i++)
2871 {
2872 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2873 vsp += 8;
2874 }
2875
2876 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2877 vsp += 4;
2878 }
2879 else if (insn == 0xc6)
2880 {
2881 int start = *entry >> 4;
2882 int count = (*entry++) & 0xf;
2883 int i;
2884
2885 /* Only registers WR0..WR15 are valid. */
2886 if (start + count >= 16)
2887 return NULL;
2888
2889 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2890 for (i = 0; i <= count; i++)
2891 {
2892 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2893 vsp += 8;
2894 }
2895 }
2896 else if (insn == 0xc7)
2897 {
2898 int mask = *entry++;
2899 int i;
2900
2901 	  /* An all-zero mask or a mask >= 16 is "spare". */
2902 if (mask == 0 || mask >= 16)
2903 return NULL;
2904
2905 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2906 for (i = 0; i < 4; i++)
2907 if (mask & (1 << i))
2908 {
2909 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2910 vsp += 4;
2911 }
2912 }
2913 else if ((insn & 0xf8) == 0xc0)
2914 {
2915 int count = insn & 0x7;
2916 int i;
2917
2918 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2919 for (i = 0; i <= count; i++)
2920 {
2921 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2922 vsp += 8;
2923 }
2924 }
2925 else if (insn == 0xc8)
2926 {
2927 int start = *entry >> 4;
2928 int count = (*entry++) & 0xf;
2929 int i;
2930
2931 	  /* Only registers D16..D31 are valid here. */
2932 if (start + count >= 16)
2933 return NULL;
2934
2935 /* Pop VFP double-precision registers
2936 D[16+start]..D[16+start+count]. */
2937 for (i = 0; i <= count; i++)
2938 {
2939 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2940 vsp += 8;
2941 }
2942 }
2943 else if (insn == 0xc9)
2944 {
2945 int start = *entry >> 4;
2946 int count = (*entry++) & 0xf;
2947 int i;
2948
2949 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2950 for (i = 0; i <= count; i++)
2951 {
2952 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2953 vsp += 8;
2954 }
2955 }
2956 else if ((insn & 0xf8) == 0xd0)
2957 {
2958 int count = insn & 0x7;
2959 int i;
2960
2961 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2962 for (i = 0; i <= count; i++)
2963 {
2964 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2965 vsp += 8;
2966 }
2967 }
2968 else
2969 {
2970 /* Everything else is "spare". */
2971 return NULL;
2972 }
2973 }
2974
2975 /* If we restore SP from a register, assume this was the frame register.
2976 Otherwise just fall back to SP as frame register. */
2977 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2978 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2979 else
2980 cache->framereg = ARM_SP_REGNUM;
2981
2982 /* Determine offset to previous frame. */
2983 cache->framesize
2984 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2985
2986 /* We already got the previous SP. */
2987 arm_gdbarch_tdep *tdep
2988 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2989 arm_cache_set_active_sp_value (cache, tdep, vsp);
2990
2991 return cache;
2992 }
2993
2994 /* Unwinding via ARM exception table entries. Note that the sniffer
2995 already computes a filled-in prologue cache, which is then used
2996 with the same arm_prologue_this_id and arm_prologue_prev_register
2997 routines also used for prologue-parsing based unwinding. */
2998
2999 static int
3000 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3001 struct frame_info *this_frame,
3002 void **this_prologue_cache)
3003 {
3004 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3005 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3006 CORE_ADDR addr_in_block, exidx_region, func_start;
3007 struct arm_prologue_cache *cache;
3008 gdb_byte *entry;
3009
3010 /* See if we have an ARM exception table entry covering this address. */
3011 addr_in_block = get_frame_address_in_block (this_frame);
3012 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3013 if (!entry)
3014 return 0;
3015
3016 /* The ARM exception table does not describe unwind information
3017 for arbitrary PC values, but is guaranteed to be correct only
3018 at call sites. We have to decide here whether we want to use
3019 ARM exception table information for this frame, or fall back
3020 to using prologue parsing. (Note that if we have DWARF CFI,
3021 this sniffer isn't even called -- CFI is always preferred.)
3022
3023 Before we make this decision, however, we check whether we
3024 actually have *symbol* information for the current frame.
3025 If not, prologue parsing would not work anyway, so we might
3026 as well use the exception table and hope for the best. */
3027 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3028 {
3029 int exc_valid = 0;
3030
3031 /* If the next frame is "normal", we are at a call site in this
3032 frame, so exception information is guaranteed to be valid. */
3033 if (get_next_frame (this_frame)
3034 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3035 exc_valid = 1;
3036
3037 /* We also assume exception information is valid if we're currently
3038 blocked in a system call. The system library is supposed to
3039 ensure this, so that e.g. pthread cancellation works. */
3040 if (arm_frame_is_thumb (this_frame))
3041 {
3042 ULONGEST insn;
3043
3044 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
3045 2, byte_order_for_code, &insn)
3046 && (insn & 0xff00) == 0xdf00 /* svc */)
3047 exc_valid = 1;
3048 }
3049 else
3050 {
3051 ULONGEST insn;
3052
3053 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
3054 4, byte_order_for_code, &insn)
3055 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3056 exc_valid = 1;
3057 }
3058
3059 /* Bail out if we don't know that exception information is valid. */
3060 if (!exc_valid)
3061 return 0;
3062
3063 /* The ARM exception index does not mark the *end* of the region
3064 covered by the entry, and some functions will not have any entry.
3065 To correctly recognize the end of the covered region, the linker
3066 should have inserted dummy records with a CANTUNWIND marker.
3067
3068 Unfortunately, current versions of GNU ld do not reliably do
3069 this, and thus we may have found an incorrect entry above.
3070 As a (temporary) sanity check, we only use the entry if it
3071 lies *within* the bounds of the function. Note that this check
3072 might reject perfectly valid entries that just happen to cover
3073 multiple functions; therefore this check ought to be removed
3074 once the linker is fixed. */
3075 if (func_start > exidx_region)
3076 return 0;
3077 }
3078
3079 /* Decode the list of unwinding instructions into a prologue cache.
3080 Note that this may fail due to e.g. a "refuse to unwind" code. */
3081 cache = arm_exidx_fill_cache (this_frame, entry);
3082 if (!cache)
3083 return 0;
3084
3085 *this_prologue_cache = cache;
3086 return 1;
3087 }
3088
3089 struct frame_unwind arm_exidx_unwind = {
3090 "arm exidx",
3091 NORMAL_FRAME,
3092 default_frame_unwind_stop_reason,
3093 arm_prologue_this_id,
3094 arm_prologue_prev_register,
3095 NULL,
3096 arm_exidx_unwind_sniffer
3097 };
3098
3099 static struct arm_prologue_cache *
3100 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
3101 {
3102 struct arm_prologue_cache *cache;
3103 int reg;
3104
3105 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3106 arm_cache_init (cache, this_frame);
3107
3108   /* Still rely on the offsets calculated from the prologue. */
3109 arm_scan_prologue (this_frame, cache);
3110
3111   /* Since we are in the epilogue, the SP has already been restored. */
3112 arm_gdbarch_tdep *tdep
3113 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3114 arm_cache_set_active_sp_value (cache, tdep,
3115 get_frame_register_unsigned (this_frame,
3116 ARM_SP_REGNUM));
3117
3118 /* Calculate actual addresses of saved registers using offsets
3119 determined by arm_scan_prologue. */
3120 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3121 if (cache->saved_regs[reg].is_addr ())
3122 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3123 + arm_cache_get_prev_sp_value (cache, tdep));
3124
3125 return cache;
3126 }
3127
3128 /* Implementation of function hook 'this_id' in
3129    'struct frame_unwind' for epilogue unwinder. */
3130
3131 static void
3132 arm_epilogue_frame_this_id (struct frame_info *this_frame,
3133 void **this_cache,
3134 struct frame_id *this_id)
3135 {
3136 struct arm_prologue_cache *cache;
3137 CORE_ADDR pc, func;
3138
3139 if (*this_cache == NULL)
3140 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3141 cache = (struct arm_prologue_cache *) *this_cache;
3142
3143 /* Use function start address as part of the frame ID. If we cannot
3144 identify the start address (due to missing symbol information),
3145 fall back to just using the current PC. */
3146 pc = get_frame_pc (this_frame);
3147 func = get_frame_func (this_frame);
3148 if (func == 0)
3149 func = pc;
3150
3151 arm_gdbarch_tdep *tdep
3152 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3153 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3154 }
3155
3156 /* Implementation of function hook 'prev_register' in
3157    'struct frame_unwind' for epilogue unwinder. */
3158
3159 static struct value *
3160 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
3161 void **this_cache, int regnum)
3162 {
3163 if (*this_cache == NULL)
3164 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3165
3166 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3167 }
3168
3169 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3170 CORE_ADDR pc);
3171 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3172 CORE_ADDR pc);
3173
3174 /* Implementation of function hook 'sniffer' in
3175    'struct frame_unwind' for epilogue unwinder. */
3176
3177 static int
3178 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3179 struct frame_info *this_frame,
3180 void **this_prologue_cache)
3181 {
3182 if (frame_relative_level (this_frame) == 0)
3183 {
3184 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3185 CORE_ADDR pc = get_frame_pc (this_frame);
3186
3187 if (arm_frame_is_thumb (this_frame))
3188 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3189 else
3190 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3191 }
3192 else
3193 return 0;
3194 }
3195
3196 /* Frame unwinder from epilogue. */
3197
3198 static const struct frame_unwind arm_epilogue_frame_unwind =
3199 {
3200 "arm epilogue",
3201 NORMAL_FRAME,
3202 default_frame_unwind_stop_reason,
3203 arm_epilogue_frame_this_id,
3204 arm_epilogue_frame_prev_register,
3205 NULL,
3206 arm_epilogue_frame_sniffer,
3207 };
3208
3209 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3210 trampoline, return the target PC. Otherwise return 0.
3211
3212 void call0a (char c, short s, int i, long l) {}
3213
3214 int main (void)
3215 {
3216 (*pointer_to_call0a) (c, s, i, l);
3217 }
3218
3219 Instead of calling a stub library function _call_via_xx (xx is
3220 the register name), GCC may inline the trampoline in the object
3221 file as below (register r2 has the address of call0a).
3222
3223 .global main
3224 .type main, %function
3225 ...
3226 bl .L1
3227 ...
3228 .size main, .-main
3229
3230 .L1:
3231 bx r2
3232
3233 The trampoline 'bx r2' doesn't belong to main. */
3234
3235 static CORE_ADDR
3236 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
3237 {
3238   /* The heuristic for recognizing such a trampoline is that FRAME is
3239      executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3240 if (arm_frame_is_thumb (frame))
3241 {
3242 gdb_byte buf[2];
3243
3244 if (target_read_memory (pc, buf, 2) == 0)
3245 {
3246 struct gdbarch *gdbarch = get_frame_arch (frame);
3247 enum bfd_endian byte_order_for_code
3248 = gdbarch_byte_order_for_code (gdbarch);
3249 uint16_t insn
3250 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3251
3252 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3253 {
3254 CORE_ADDR dest
3255 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3256
3257 /* Clear the LSB so that gdb core sets step-resume
3258 breakpoint at the right address. */
3259 return UNMAKE_THUMB_ADDR (dest);
3260 }
3261 }
3262 }
3263
3264 return 0;
3265 }
3266
3267 static struct arm_prologue_cache *
3268 arm_make_stub_cache (struct frame_info *this_frame)
3269 {
3270 struct arm_prologue_cache *cache;
3271
3272 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3273 arm_cache_init (cache, this_frame);
3274
3275 arm_gdbarch_tdep *tdep
3276 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3277 arm_cache_set_active_sp_value (cache, tdep,
3278 get_frame_register_unsigned (this_frame,
3279 ARM_SP_REGNUM));
3280
3281 return cache;
3282 }
3283
3284 /* Our frame ID for a stub frame is the current SP and LR. */
3285
3286 static void
3287 arm_stub_this_id (struct frame_info *this_frame,
3288 void **this_cache,
3289 struct frame_id *this_id)
3290 {
3291 struct arm_prologue_cache *cache;
3292
3293 if (*this_cache == NULL)
3294 *this_cache = arm_make_stub_cache (this_frame);
3295 cache = (struct arm_prologue_cache *) *this_cache;
3296
3297 arm_gdbarch_tdep *tdep
3298 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3299 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3300 get_frame_pc (this_frame));
3301 }
3302
3303 static int
3304 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3305 struct frame_info *this_frame,
3306 void **this_prologue_cache)
3307 {
3308 CORE_ADDR addr_in_block;
3309 gdb_byte dummy[4];
3310 CORE_ADDR pc, start_addr;
3311 const char *name;
3312
3313 addr_in_block = get_frame_address_in_block (this_frame);
3314 pc = get_frame_pc (this_frame);
3315 if (in_plt_section (addr_in_block)
3316       /* We also use the stub unwinder if the target memory is unreadable
3317 to avoid having the prologue unwinder trying to read it. */
3318 || target_read_memory (pc, dummy, 4) != 0)
3319 return 1;
3320
3321 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3322 && arm_skip_bx_reg (this_frame, pc) != 0)
3323 return 1;
3324
3325 return 0;
3326 }
3327
3328 struct frame_unwind arm_stub_unwind = {
3329 "arm stub",
3330 NORMAL_FRAME,
3331 default_frame_unwind_stop_reason,
3332 arm_stub_this_id,
3333 arm_prologue_prev_register,
3334 NULL,
3335 arm_stub_unwind_sniffer
3336 };
3337
3338 /* Store, into CACHE->saved_regs, the addresses of the saved
3339    registers of the frame described by THIS_FRAME.  CACHE is
3340    returned. */
3341
3342 static struct arm_prologue_cache *
3343 arm_m_exception_cache (struct frame_info *this_frame)
3344 {
3345 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3346 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3347 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3348 struct arm_prologue_cache *cache;
3349 CORE_ADDR lr;
3350 CORE_ADDR sp;
3351 CORE_ADDR unwound_sp;
3352 uint32_t sp_r0_offset = 0;
3353 LONGEST xpsr;
3354 uint32_t exc_return;
3355 bool fnc_return;
3356 uint32_t extended_frame_used;
3357 bool secure_stack_used = false;
3358 bool default_callee_register_stacking = false;
3359 bool exception_domain_is_secure = false;
3360
3361 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3362 arm_cache_init (cache, this_frame);
3363
3364   /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3365      describes which bits in LR define which stack was used prior to the
3366      exception and whether the FPU was used (causing an extended stack frame). */
3367
3368 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3369 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3370
3371 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3372 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3373 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3374 reset if Main Extension is implemented, otherwise the value is unknown. */
3375 if (lr == 0xffffffff)
3376 {
3377 /* Terminate any further stack unwinding by referring to self. */
3378 arm_cache_set_active_sp_value (cache, tdep, sp);
3379 return cache;
3380 }
3381
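  /* An LR value whose top byte is 0xFE is a FNC_RETURN payload, written
     when secure code calls a non-secure function (ARMv8-M Security
     Extension).  */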
3382 fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3383 if (tdep->have_sec_ext && fnc_return)
3384 {
3385 if (!arm_unwind_secure_frames)
3386 {
3387 warning (_("Non-secure to secure stack unwinding disabled."));
3388
3389 /* Terminate any further stack unwinding by referring to self. */
3390 arm_cache_set_active_sp_value (cache, tdep, sp);
3391 return cache;
3392 }
3393
3394 xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
3395 if ((xpsr & 0xff) != 0)
3396 /* Handler mode: This is the mode that exceptions are handled in. */
3397 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3398 else
3399 /* Thread mode: This is the normal mode that programs run in. */
3400 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3401
3402 unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3403
3404 /* Stack layout for a function call from Secure to Non-Secure state
3405 (ARMv8-M section B3.16):
3406
3407 SP Offset
3408
3409 +-------------------+
3410 0x08 | |
3411 +-------------------+ <-- Original SP
3412 0x04 | Partial xPSR |
3413 +-------------------+
3414 0x00 | Return Address |
3415 +===================+ <-- New SP */
3416
3417 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3418 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3419 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3420
3421 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3422
3423 return cache;
3424 }
3425
3426 /* Check EXC_RETURN indicator bits (24-31). */
3427 exc_return = (((lr >> 24) & 0xff) == 0xff);
3428 if (exc_return)
3429 {
3430       /* Check the EXC_RETURN SPSEL bit to see whether the Main or the Thread (process) stack was used. */
3431 bool process_stack_used = ((lr & (1 << 2)) != 0);
3432
3433 if (tdep->have_sec_ext)
3434 {
3435 secure_stack_used = ((lr & (1 << 6)) != 0);
3436 default_callee_register_stacking = ((lr & (1 << 5)) != 0);
3437 exception_domain_is_secure = ((lr & (1 << 0)) == 0);
3438
3439 /* Unwinding from non-secure to secure can trip security
3440 measures. In order to avoid the debugger being
3441 intrusive, rely on the user to configure the requested
3442 mode. */
3443 if (secure_stack_used && !exception_domain_is_secure
3444 && !arm_unwind_secure_frames)
3445 {
3446 warning (_("Non-secure to secure stack unwinding disabled."));
3447
3448 /* Terminate any further stack unwinding by referring to self. */
3449 arm_cache_set_active_sp_value (cache, tdep, sp);
3450 return cache;
3451 }
3452
3453 if (process_stack_used)
3454 {
3455 if (secure_stack_used)
3456 /* Secure thread (process) stack used, use PSP_S as SP. */
3457 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3458 else
3459 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3460 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_ns_regnum);
3461 }
3462 else
3463 {
3464 if (secure_stack_used)
3465 /* Secure main stack used, use MSP_S as SP. */
3466 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3467 else
3468 /* Non-secure main stack used, use MSP_NS as SP. */
3469 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_ns_regnum);
3470 }
3471 }
3472 else
3473 {
3474 if (process_stack_used)
3475 /* Thread (process) stack used, use PSP as SP. */
3476 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_regnum);
3477 else
3478 /* Main stack used, use MSP as SP. */
3479 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_regnum);
3480 }
3481 }
3482
3483 /* Fetch the SP to use for this frame. */
3484 unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3485
3486   /* Exception entry context stacking is described in ARMv8-M (section B3.19)
3487 and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference Manuals.
3488
3489 The following figure shows the structure of the stack frame when Security
3490 and Floating-point extensions are present.
3491
3492 SP Offsets
3493 Without With
3494 Callee Regs Callee Regs
3495 (Secure -> Non-Secure)
3496 +-------------------+
3497 0xA8 | | 0xD0
3498 +===================+ --+ <-- Original SP
3499 0xA4 | S31 | 0xCC |
3500 +-------------------+ |
3501 ... | Additional FP context
3502 +-------------------+ |
3503 0x68 | S16 | 0x90 |
3504 +===================+ --+
3505 0x64 | Reserved | 0x8C |
3506 +-------------------+ |
3507 0x60 | FPSCR | 0x88 |
3508 +-------------------+ |
3509 0x5C | S15 | 0x84 | FP context
3510 +-------------------+ |
3511 ... |
3512 +-------------------+ |
3513 0x20 | S0 | 0x48 |
3514 +===================+ --+
3515 0x1C | xPSR | 0x44 |
3516 +-------------------+ |
3517 0x18 | Return address | 0x40 |
3518 +-------------------+ |
3519 0x14 | LR(R14) | 0x3C |
3520 +-------------------+ |
3521 0x10 | R12 | 0x38 | State context
3522 +-------------------+ |
3523 0x0C | R3 | 0x34 |
3524 +-------------------+ |
3525 ... |
3526 +-------------------+ |
3527 0x00 | R0 | 0x28 |
3528 +===================+ --+
3529 | R11 | 0x24 |
3530 +-------------------+ |
3531 ... |
3532 +-------------------+ | Additional state context
3533 | R4 | 0x08 | when transitioning from
3534 +-------------------+ | Secure to Non-Secure
3535 | Reserved | 0x04 |
3536 +-------------------+ |
3537 | Magic signature | 0x00 |
3538 +===================+ --+ <-- New SP */
3539
3540 /* With the Security extension, the hardware saves R4..R11 too. */
3541 if (exc_return && tdep->have_sec_ext && secure_stack_used
3542 && (!default_callee_register_stacking || exception_domain_is_secure))
3543 {
3544 /* Read R4..R11 from the integer callee registers. */
3545 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3546 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3547 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3548 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3549 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3550 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3551 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3552 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3553 sp_r0_offset = 0x28;
3554 }
3555
3556 /* The hardware saves eight 32-bit words, comprising xPSR,
3557 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3558 "B1.5.6 Exception entry behavior" in
3559 "ARMv7-M Architecture Reference Manual". */
3560 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3561 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3562 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3563 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3564 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x10);
3565 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x14);
3566 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x18);
3567 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x1C);
3568
3569 /* Check the EXC_RETURN FType bit to see whether the extended stack frame
3570 type (FPU registers stored) was used. */
3571 extended_frame_used = ((lr & (1 << 4)) == 0);
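/* For instance, an EXC_RETURN value of 0xFFFFFFED (return to Thread
   mode using the process stack) has bit 4 (FType) clear, so the
   extended frame layout with FPU registers is assumed, whereas
   0xFFFFFFFD has bit 4 set and selects the standard frame.  */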
3572 if (exc_return && extended_frame_used)
3573 {
3574 int i;
3575 int fpu_regs_stack_offset;
3576 ULONGEST fpccr;
3577
3578 /* Read FPCCR register. */
3579 gdb_assert (safe_read_memory_unsigned_integer (FPCCR,
3580 ARM_INT_REGISTER_SIZE,
3581 byte_order, &fpccr));
3582 bool fpccr_ts = bit (fpccr, 26);
3583
3584 /* This code does not take lazy stacking into account; see "Lazy
3585 context save of FP state" in B1.5.7, and also ARM AN298, which is
3586 supported by the Cortex-M4F architecture.
3587 To handle this fully, the FPCCR register (Floating-point Context
3588 Control Register) would need to be read and its ASPEN and LSPEN bits
3589 checked in order to set up the lazily stacked FP registers correctly.
3590 This register is located at address 0xE000EF34. */
3591
3592 /* Extended stack frame type used. */
3593 fpu_regs_stack_offset = unwound_sp + sp_r0_offset + 0x20;
3594 for (i = 0; i < 8; i++)
3595 {
3596 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3597 fpu_regs_stack_offset += 8;
3598 }
3599 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x60);
3600 fpu_regs_stack_offset += 4;
3601
3602 if (tdep->have_sec_ext && !default_callee_register_stacking && fpccr_ts)
3603 {
3604 /* Handle floating-point callee saved registers. */
3605 fpu_regs_stack_offset = unwound_sp + sp_r0_offset + 0x68;
3606 for (i = 8; i < 16; i++)
3607 {
3608 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3609 fpu_regs_stack_offset += 8;
3610 }
3611
3612 arm_cache_set_active_sp_value (cache, tdep,
3613 unwound_sp + sp_r0_offset + 0xA8);
3614 }
3615 else
3616 {
3617 /* Offset 0x64 is reserved. */
3618 arm_cache_set_active_sp_value (cache, tdep,
3619 unwound_sp + sp_r0_offset + 0x68);
3620 }
3621 }
3622 else
3623 {
3624 /* Standard stack frame type used. */
3625 arm_cache_set_active_sp_value (cache, tdep,
3626 unwound_sp + sp_r0_offset + 0x20);
3627 }
3628
3629 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3630 aligner between the top of the stacked exception frame and the
3631 previous context's stack pointer. */
3632 if (safe_read_memory_integer (unwound_sp + sp_r0_offset + 0x1C, 4,
3633 byte_order, &xpsr)
3634 && (xpsr & (1 << 9)) != 0)
3635 arm_cache_set_active_sp_value (cache, tdep,
3636 arm_cache_get_prev_sp_value (cache, tdep) + 4);
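/* As an illustration (addresses are hypothetical): if a standard
   non-secure frame starts at 0x2000ff00, the code above first sets the
   previous SP to 0x2000ff20; if the saved xPSR also has bit 9 set, the
   pre-exception SP was really 0x2000ff24, because the hardware pushed a
   4-byte aligner to keep the stack 8-byte aligned at exception entry.  */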
3637
3638 return cache;
3639 }
3640
3641 /* Implementation of function hook 'this_id' in
3642 'struct frame_unwind'. */
3643
3644 static void
3645 arm_m_exception_this_id (struct frame_info *this_frame,
3646 void **this_cache,
3647 struct frame_id *this_id)
3648 {
3649 struct arm_prologue_cache *cache;
3650
3651 if (*this_cache == NULL)
3652 *this_cache = arm_m_exception_cache (this_frame);
3653 cache = (struct arm_prologue_cache *) *this_cache;
3654
3655 /* Build the frame ID from the previously unwound SP value and this frame's PC. */
3656 arm_gdbarch_tdep *tdep
3657 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3658 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3659 get_frame_pc (this_frame));
3660 }
3661
3662 /* Implementation of function hook 'prev_register' in
3663 'struct frame_unwind'. */
3664
3665 static struct value *
3666 arm_m_exception_prev_register (struct frame_info *this_frame,
3667 void **this_cache,
3668 int prev_regnum)
3669 {
3670 struct arm_prologue_cache *cache;
3671 CORE_ADDR sp_value;
3672
3673 if (*this_cache == NULL)
3674 *this_cache = arm_m_exception_cache (this_frame);
3675 cache = (struct arm_prologue_cache *) *this_cache;
3676
3677 /* The value was already reconstructed into PREV_SP. */
3678 arm_gdbarch_tdep *tdep
3679 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3680 if (prev_regnum == ARM_SP_REGNUM)
3681 return frame_unwind_got_constant (this_frame, prev_regnum,
3682 arm_cache_get_prev_sp_value (cache, tdep));
3683
3684 /* If we are asked to unwind the PC, strip the saved T bit. */
3685 if (prev_regnum == ARM_PC_REGNUM)
3686 {
3687 struct value *value = trad_frame_get_prev_register (this_frame,
3688 cache->saved_regs,
3689 prev_regnum);
3690 CORE_ADDR pc = value_as_address (value);
3691 return frame_unwind_got_constant (this_frame, prev_regnum,
3692 UNMAKE_THUMB_ADDR (pc));
3693 }
3694
3695 /* The value might be one of the alternative SP registers; if so, use the
3696 value already reconstructed. */
3697 if (arm_cache_is_sp_register (cache, tdep, prev_regnum))
3698 {
3699 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3700 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3701 }
3702
3703 /* If we are asked to unwind the xPSR, set the T bit if the PC is in Thumb
3704 mode. The LR register is unreliable here, as it contains a FNC_RETURN or
3705 EXC_RETURN pattern. */
3706 if (prev_regnum == ARM_PS_REGNUM)
3707 {
3708 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3709 struct value *value = trad_frame_get_prev_register (this_frame,
3710 cache->saved_regs,
3711 ARM_PC_REGNUM);
3712 CORE_ADDR pc = value_as_address (value);
3713 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3714 ARM_PS_REGNUM);
3715 ULONGEST xpsr = value_as_long (value);
3716
3717 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3718 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3719 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3720 }
3721
3722 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3723 prev_regnum);
3724 }
3725
3726 /* Implementation of function hook 'sniffer' in
3727 'struct frame_unwind'. */
3728
3729 static int
3730 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3731 struct frame_info *this_frame,
3732 void **this_prologue_cache)
3733 {
3734 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3735 CORE_ADDR this_pc = get_frame_pc (this_frame);
3736
3737 /* No need to check is_m; this sniffer is only registered for
3738 M-profile architectures. */
3739
3740 /* Check if exception frame returns to a magic PC value. */
3741 return arm_m_addr_is_magic (gdbarch, this_pc);
3742 }
3743
3744 /* Frame unwinder for M-profile exceptions. */
3745
3746 struct frame_unwind arm_m_exception_unwind =
3747 {
3748 "arm m exception",
3749 SIGTRAMP_FRAME,
3750 default_frame_unwind_stop_reason,
3751 arm_m_exception_this_id,
3752 arm_m_exception_prev_register,
3753 NULL,
3754 arm_m_exception_unwind_sniffer
3755 };
3756
3757 static CORE_ADDR
3758 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3759 {
3760 struct arm_prologue_cache *cache;
3761
3762 if (*this_cache == NULL)
3763 *this_cache = arm_make_prologue_cache (this_frame);
3764 cache = (struct arm_prologue_cache *) *this_cache;
3765
3766 arm_gdbarch_tdep *tdep
3767 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3768 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3769 }
3770
3771 struct frame_base arm_normal_base = {
3772 &arm_prologue_unwind,
3773 arm_normal_frame_base,
3774 arm_normal_frame_base,
3775 arm_normal_frame_base
3776 };
3777
3778 static struct value *
3779 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3780 int regnum)
3781 {
3782 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3783 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3784 CORE_ADDR lr;
3785 ULONGEST cpsr;
3786
3787 switch (regnum)
3788 {
3789 case ARM_PC_REGNUM:
3790 /* The PC is normally copied from the return column, which
3791 describes saves of LR. However, that version may have an
3792 extra bit set to indicate Thumb state. The bit is not
3793 part of the PC. */
3794
3795 /* Record in the frame whether the return address was signed. */
3796 if (tdep->have_pacbti)
3797 {
3798 CORE_ADDR ra_auth_code
3799 = frame_unwind_register_unsigned (this_frame,
3800 tdep->pacbti_pseudo_base);
3801
3802 if (ra_auth_code != 0)
3803 set_frame_previous_pc_masked (this_frame);
3804 }
3805
3806 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3807 return frame_unwind_got_constant (this_frame, regnum,
3808 arm_addr_bits_remove (gdbarch, lr));
3809
3810 case ARM_PS_REGNUM:
3811 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3812 cpsr = get_frame_register_unsigned (this_frame, regnum);
3813 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3814 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
3815 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3816
3817 default:
3818 internal_error (__FILE__, __LINE__,
3819 _("Unexpected register %d"), regnum);
3820 }
3821 }
3822
3823 /* Implement the stack_frame_destroyed_p gdbarch method. */
3824
3825 static int
3826 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3827 {
3828 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3829 unsigned int insn, insn2;
3830 int found_return = 0, found_stack_adjust = 0;
3831 CORE_ADDR func_start, func_end;
3832 CORE_ADDR scan_pc;
3833 gdb_byte buf[4];
3834
3835 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3836 return 0;
3837
3838 /* The epilogue is a sequence of instructions along the following lines:
3839
3840 - add stack frame size to SP or FP
3841 - [if frame pointer used] restore SP from FP
3842 - restore registers from SP [may include PC]
3843 - a return-type instruction [if PC wasn't already restored]
3844
3845 In a first pass, we scan forward from the current PC and verify the
3846 instructions we find as compatible with this sequence, ending in a
3847 return instruction.
3848
3849 However, this is not sufficient to distinguish indirect function calls
3850 within a function from indirect tail calls in the epilogue in some cases.
3851 Therefore, if we didn't already find any SP-changing instruction during
3852 forward scan, we add a backward scanning heuristic to ensure we actually
3853 are in the epilogue. */
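/* For instance, with a common Thumb epilogue such as

       add  sp, #16         (discard locals; restores SP)
       pop  {r4-r7, pc}     (restore callee-saved registers and return)

   and PC at the "pop", the forward scan recognizes the pop of PC as the
   return and the backward scan recognizes the "add sp" as the stack
   adjustment, so the frame is reported as destroyed.  */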
3854
3855 scan_pc = pc;
3856 while (scan_pc < func_end && !found_return)
3857 {
3858 if (target_read_memory (scan_pc, buf, 2))
3859 break;
3860
3861 scan_pc += 2;
3862 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3863
3864 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3865 found_return = 1;
3866 else if (insn == 0x46f7) /* mov pc, lr */
3867 found_return = 1;
3868 else if (thumb_instruction_restores_sp (insn))
3869 {
3870 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3871 found_return = 1;
3872 }
3873 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3874 {
3875 if (target_read_memory (scan_pc, buf, 2))
3876 break;
3877
3878 scan_pc += 2;
3879 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3880
3881 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3882 {
3883 if (insn2 & 0x8000) /* <registers> include PC. */
3884 found_return = 1;
3885 }
3886 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3887 && (insn2 & 0x0fff) == 0x0b04)
3888 {
3889 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3890 found_return = 1;
3891 }
3892 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3893 && (insn2 & 0x0e00) == 0x0a00)
3894 ;
3895 else
3896 break;
3897 }
3898 else
3899 break;
3900 }
3901
3902 if (!found_return)
3903 return 0;
3904
3905 /* Since any instruction in the epilogue sequence, with the possible
3906 exception of return itself, updates the stack pointer, we need to
3907 scan backwards for at most one instruction. Try either a 16-bit or
3908 a 32-bit instruction. This is just a heuristic, so we do not worry
3909 too much about false positives. */
3910
3911 if (pc - 4 < func_start)
3912 return 0;
3913 if (target_read_memory (pc - 4, buf, 4))
3914 return 0;
3915
3916 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3917 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3918
3919 if (thumb_instruction_restores_sp (insn2))
3920 found_stack_adjust = 1;
3921 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3922 found_stack_adjust = 1;
3923 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3924 && (insn2 & 0x0fff) == 0x0b04)
3925 found_stack_adjust = 1;
3926 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3927 && (insn2 & 0x0e00) == 0x0a00)
3928 found_stack_adjust = 1;
3929
3930 return found_stack_adjust;
3931 }
3932
3933 static int
3934 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3935 {
3936 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3937 unsigned int insn;
3938 int found_return;
3939 CORE_ADDR func_start, func_end;
3940
3941 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3942 return 0;
3943
3944 /* We are in the epilogue if the previous instruction was a stack
3945 adjustment and the next instruction is a possible return (bx, mov
3946 pc, or pop). We could have to scan backwards to find the stack
3947 adjustment, or forwards to find the return, but this is a decent
3948 approximation. First scan forwards. */
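/* For instance, with a typical ARM epilogue such as

       ldmfd  sp!, {r4, r11, lr}    (stack adjustment restoring SP)
       bx     lr                    (return)

   and PC at the "bx", the forward check matches the return and the
   backward check recognizes the "ldmfd sp!" as the stack adjustment.  */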
3949
3950 found_return = 0;
3951 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3952 if (bits (insn, 28, 31) != INST_NV)
3953 {
3954 if ((insn & 0x0ffffff0) == 0x012fff10)
3955 /* BX. */
3956 found_return = 1;
3957 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3958 /* MOV PC. */
3959 found_return = 1;
3960 else if ((insn & 0x0fff0000) == 0x08bd0000
3961 && (insn & 0x0000c000) != 0)
3962 /* POP (LDMIA), including PC or LR. */
3963 found_return = 1;
3964 }
3965
3966 if (!found_return)
3967 return 0;
3968
3969 /* Scan backwards. This is just a heuristic, so do not worry about
3970 false positives from mode changes. */
3971
3972 if (pc < func_start + 4)
3973 return 0;
3974
3975 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3976 if (arm_instruction_restores_sp (insn))
3977 return 1;
3978
3979 return 0;
3980 }
3981
3982 /* Implement the stack_frame_destroyed_p gdbarch method. */
3983
3984 static int
3985 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3986 {
3987 if (arm_pc_is_thumb (gdbarch, pc))
3988 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3989 else
3990 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3991 }
3992
3993 /* When arguments must be pushed onto the stack, they go on in reverse
3994 order. The code below implements a FILO (stack) to do this. */
3995
3996 struct arm_stack_item
3997 {
3998 int len;
3999 struct arm_stack_item *prev;
4000 gdb_byte *data;
4001 };
4002
4003 static struct arm_stack_item *
4004 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4005 int len)
4006 {
4007 struct arm_stack_item *si;
4008 si = XNEW (struct arm_stack_item);
4009 si->data = (gdb_byte *) xmalloc (len);
4010 si->len = len;
4011 si->prev = prev;
4012 memcpy (si->data, contents, len);
4013 return si;
4014 }
4015
4016 static struct arm_stack_item *
4017 pop_stack_item (struct arm_stack_item *si)
4018 {
4019 struct arm_stack_item *dead = si;
4020 si = si->prev;
4021 xfree (dead->data);
4022 xfree (dead);
4023 return si;
4024 }
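/* A minimal usage sketch (BUF_A and BUF_B are hypothetical word-sized
   buffers): pushing A and then B, and draining the list the same way
   arm_push_dummy_call does below, writes B at the higher address and A
   at the final SP, so the first item pushed ends up lowest on the stack:

       si = push_stack_item (si, buf_a, 4);
       si = push_stack_item (si, buf_b, 4);
       while (si)
	 {
	   sp -= si->len;
	   write_memory (sp, si->data, si->len);
	   si = pop_stack_item (si);
	 }
*/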
4025
4026 /* Implement the gdbarch type alignment method, overriding the generic
4027 alignment algorithm for anything that is ARM-specific. */
4028
4029 static ULONGEST
4030 arm_type_align (gdbarch *gdbarch, struct type *t)
4031 {
4032 t = check_typedef (t);
4033 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4034 {
4035 /* Use the natural alignment for vector types (the same as for
4036 scalar types), but cap the alignment at 64 bits. */
4037 if (TYPE_LENGTH (t) > 8)
4038 return 8;
4039 else
4040 return TYPE_LENGTH (t);
4041 }
4042
4043 /* Allow the common code to calculate the alignment. */
4044 return 0;
4045 }
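/* For example, a 16-byte Neon vector such as uint32x4_t is given 8-byte
   alignment by the code above, an 8-byte vector keeps its natural 8-byte
   alignment, and non-vector types fall through to the generic rules.  */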
4046
4047 /* Possible base types for a candidate for passing and returning in
4048 VFP registers. */
4049
4050 enum arm_vfp_cprc_base_type
4051 {
4052 VFP_CPRC_UNKNOWN,
4053 VFP_CPRC_SINGLE,
4054 VFP_CPRC_DOUBLE,
4055 VFP_CPRC_VEC64,
4056 VFP_CPRC_VEC128
4057 };
4058
4059 /* The length of one element of base type B. */
4060
4061 static unsigned
4062 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4063 {
4064 switch (b)
4065 {
4066 case VFP_CPRC_SINGLE:
4067 return 4;
4068 case VFP_CPRC_DOUBLE:
4069 return 8;
4070 case VFP_CPRC_VEC64:
4071 return 8;
4072 case VFP_CPRC_VEC128:
4073 return 16;
4074 default:
4075 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
4076 (int) b);
4077 }
4078 }
4079
4080 /* The character ('s', 'd' or 'q') for the type of VFP register used
4081 for passing base type B. */
4082
4083 static int
4084 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4085 {
4086 switch (b)
4087 {
4088 case VFP_CPRC_SINGLE:
4089 return 's';
4090 case VFP_CPRC_DOUBLE:
4091 return 'd';
4092 case VFP_CPRC_VEC64:
4093 return 'd';
4094 case VFP_CPRC_VEC128:
4095 return 'q';
4096 default:
4097 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
4098 (int) b);
4099 }
4100 }
4101
4102 /* Determine whether T may be part of a candidate for passing and
4103 returning in VFP registers, ignoring the limit on the total number
4104 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4105 classification of the first valid component found; if it is not
4106 VFP_CPRC_UNKNOWN, all components must have the same classification
4107 as *BASE_TYPE. If it is found that T contains a type not permitted
4108 for passing and returning in VFP registers, a type differently
4109 classified from *BASE_TYPE, or two types differently classified
4110 from each other, return -1, otherwise return the total number of
4111 base-type elements found (possibly 0 in an empty structure or
4112 array). Vector types are not currently supported, matching the
4113 generic AAPCS support. */
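/* A few illustrative classifications (assuming 4-byte float and 8-byte
   double):

     struct { float x, y, z; }      -> VFP_CPRC_SINGLE, 3 elements
     struct { double re, im; }      -> VFP_CPRC_DOUBLE, 2 elements
     double[4]                      -> VFP_CPRC_DOUBLE, 4 elements
     struct { float f; double d; }  -> -1 (differently classified members)
     struct { float f; int n; }     -> -1 (int is not a VFP CPRC type)  */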
4114
4115 static int
4116 arm_vfp_cprc_sub_candidate (struct type *t,
4117 enum arm_vfp_cprc_base_type *base_type)
4118 {
4119 t = check_typedef (t);
4120 switch (t->code ())
4121 {
4122 case TYPE_CODE_FLT:
4123 switch (TYPE_LENGTH (t))
4124 {
4125 case 4:
4126 if (*base_type == VFP_CPRC_UNKNOWN)
4127 *base_type = VFP_CPRC_SINGLE;
4128 else if (*base_type != VFP_CPRC_SINGLE)
4129 return -1;
4130 return 1;
4131
4132 case 8:
4133 if (*base_type == VFP_CPRC_UNKNOWN)
4134 *base_type = VFP_CPRC_DOUBLE;
4135 else if (*base_type != VFP_CPRC_DOUBLE)
4136 return -1;
4137 return 1;
4138
4139 default:
4140 return -1;
4141 }
4142 break;
4143
4144 case TYPE_CODE_COMPLEX:
4145 /* Arguments of complex T where T is one of the types float or
4146 double get treated as if they are implemented as:
4147
4148 struct complexT
4149 {
4150 T real;
4151 T imag;
4152 };
4153
4154 */
4155 switch (TYPE_LENGTH (t))
4156 {
4157 case 8:
4158 if (*base_type == VFP_CPRC_UNKNOWN)
4159 *base_type = VFP_CPRC_SINGLE;
4160 else if (*base_type != VFP_CPRC_SINGLE)
4161 return -1;
4162 return 2;
4163
4164 case 16:
4165 if (*base_type == VFP_CPRC_UNKNOWN)
4166 *base_type = VFP_CPRC_DOUBLE;
4167 else if (*base_type != VFP_CPRC_DOUBLE)
4168 return -1;
4169 return 2;
4170
4171 default:
4172 return -1;
4173 }
4174 break;
4175
4176 case TYPE_CODE_ARRAY:
4177 {
4178 if (t->is_vector ())
4179 {
4180 /* 64-bit and 128-bit containerized vector types are VFP
4181 CPRCs. */
4182 switch (TYPE_LENGTH (t))
4183 {
4184 case 8:
4185 if (*base_type == VFP_CPRC_UNKNOWN)
4186 *base_type = VFP_CPRC_VEC64;
4187 return 1;
4188 case 16:
4189 if (*base_type == VFP_CPRC_UNKNOWN)
4190 *base_type = VFP_CPRC_VEC128;
4191 return 1;
4192 default:
4193 return -1;
4194 }
4195 }
4196 else
4197 {
4198 int count;
4199 unsigned unitlen;
4200
4201 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
4202 base_type);
4203 if (count == -1)
4204 return -1;
4205 if (TYPE_LENGTH (t) == 0)
4206 {
4207 gdb_assert (count == 0);
4208 return 0;
4209 }
4210 else if (count == 0)
4211 return -1;
4212 unitlen = arm_vfp_cprc_unit_length (*base_type);
4213 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
4214 return TYPE_LENGTH (t) / unitlen;
4215 }
4216 }
4217 break;
4218
4219 case TYPE_CODE_STRUCT:
4220 {
4221 int count = 0;
4222 unsigned unitlen;
4223 int i;
4224 for (i = 0; i < t->num_fields (); i++)
4225 {
4226 int sub_count = 0;
4227
4228 if (!field_is_static (&t->field (i)))
4229 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4230 base_type);
4231 if (sub_count == -1)
4232 return -1;
4233 count += sub_count;
4234 }
4235 if (TYPE_LENGTH (t) == 0)
4236 {
4237 gdb_assert (count == 0);
4238 return 0;
4239 }
4240 else if (count == 0)
4241 return -1;
4242 unitlen = arm_vfp_cprc_unit_length (*base_type);
4243 if (TYPE_LENGTH (t) != unitlen * count)
4244 return -1;
4245 return count;
4246 }
4247
4248 case TYPE_CODE_UNION:
4249 {
4250 int count = 0;
4251 unsigned unitlen;
4252 int i;
4253 for (i = 0; i < t->num_fields (); i++)
4254 {
4255 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4256 base_type);
4257 if (sub_count == -1)
4258 return -1;
4259 count = (count > sub_count ? count : sub_count);
4260 }
4261 if (TYPE_LENGTH (t) == 0)
4262 {
4263 gdb_assert (count == 0);
4264 return 0;
4265 }
4266 else if (count == 0)
4267 return -1;
4268 unitlen = arm_vfp_cprc_unit_length (*base_type);
4269 if (TYPE_LENGTH (t) != unitlen * count)
4270 return -1;
4271 return count;
4272 }
4273
4274 default:
4275 break;
4276 }
4277
4278 return -1;
4279 }
4280
4281 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4282 if passed to or returned from a non-variadic function with the VFP
4283 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4284 *BASE_TYPE to the base type for T and *COUNT to the number of
4285 elements of that base type before returning. */
4286
4287 static int
4288 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4289 int *count)
4290 {
4291 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4292 int c = arm_vfp_cprc_sub_candidate (t, &b);
4293 if (c <= 0 || c > 4)
4294 return 0;
4295 *base_type = b;
4296 *count = c;
4297 return 1;
4298 }
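/* For example, a structure containing four doubles is a CPRC (base type
   double, four elements, passed in d0-d3 under the VFP ABI), while one
   containing five doubles exceeds the four-element limit and falls back
   to the base ABI.  */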
4299
4300 /* Return 1 if the VFP ABI should be used for passing arguments to and
4301 returning values from a function of type FUNC_TYPE, 0
4302 otherwise. */
4303
4304 static int
4305 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4306 {
4307 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4308
4309 /* Variadic functions always use the base ABI. Assume that functions
4310 without debug info are not variadic. */
4311 if (func_type && check_typedef (func_type)->has_varargs ())
4312 return 0;
4313
4314 /* The VFP ABI is only supported as a variant of AAPCS. */
4315 if (tdep->arm_abi != ARM_ABI_AAPCS)
4316 return 0;
4317
4318 return tdep->fp_model == ARM_FLOAT_VFP;
4319 }
4320
4321 /* We currently support only two parameter passing models: integer
4322 registers, which conforms with GCC's default model, and VFP argument
4323 passing following the VFP variant of AAPCS.  Several other variants
4324 exist and we should probably support some of them based on the selected ABI. */
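/* As an example of the core-register rules implemented below (AAPCS,
   with no VFP candidates involved): for a hypothetical call
   f (int a, long long b, int c), A is passed in r0, B requires
   doubleword alignment and therefore goes in the even/odd pair r2/r3
   (r1 is skipped), and C, with no argument registers left, is pushed
   onto the stack.  */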
4325
4326 static CORE_ADDR
4327 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4328 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4329 struct value **args, CORE_ADDR sp,
4330 function_call_return_method return_method,
4331 CORE_ADDR struct_addr)
4332 {
4333 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4334 int argnum;
4335 int argreg;
4336 int nstack;
4337 struct arm_stack_item *si = NULL;
4338 int use_vfp_abi;
4339 struct type *ftype;
4340 unsigned vfp_regs_free = (1 << 16) - 1;
4341 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4342
4343 /* Determine the type of this function and whether the VFP ABI
4344 applies. */
4345 ftype = check_typedef (value_type (function));
4346 if (ftype->code () == TYPE_CODE_PTR)
4347 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
4348 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4349
4350 /* Set the return address. For the ARM, the return breakpoint is
4351 always at BP_ADDR. */
4352 if (arm_pc_is_thumb (gdbarch, bp_addr))
4353 bp_addr |= 1;
4354 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4355
4356 /* Walk through the list of args and determine how large a temporary
4357 stack is required. Need to take care here as structs may be
4358 passed on the stack, and we have to push them. */
4359 nstack = 0;
4360
4361 argreg = ARM_A1_REGNUM;
4362 nstack = 0;
4363
4364 /* The struct_return pointer occupies the first parameter
4365 passing register. */
4366 if (return_method == return_method_struct)
4367 {
4368 arm_debug_printf ("struct return in %s = %s",
4369 gdbarch_register_name (gdbarch, argreg),
4370 paddress (gdbarch, struct_addr));
4371
4372 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4373 argreg++;
4374 }
4375
4376 for (argnum = 0; argnum < nargs; argnum++)
4377 {
4378 int len;
4379 struct type *arg_type;
4380 struct type *target_type;
4381 enum type_code typecode;
4382 const bfd_byte *val;
4383 int align;
4384 enum arm_vfp_cprc_base_type vfp_base_type;
4385 int vfp_base_count;
4386 int may_use_core_reg = 1;
4387
4388 arg_type = check_typedef (value_type (args[argnum]));
4389 len = TYPE_LENGTH (arg_type);
4390 target_type = TYPE_TARGET_TYPE (arg_type);
4391 typecode = arg_type->code ();
4392 val = value_contents (args[argnum]).data ();
4393
4394 align = type_align (arg_type);
4395 /* Round alignment up to a whole number of words. */
4396 align = (align + ARM_INT_REGISTER_SIZE - 1)
4397 & ~(ARM_INT_REGISTER_SIZE - 1);
4398 /* Different ABIs have different maximum alignments. */
4399 if (tdep->arm_abi == ARM_ABI_APCS)
4400 {
4401 /* The APCS ABI only requires word alignment. */
4402 align = ARM_INT_REGISTER_SIZE;
4403 }
4404 else
4405 {
4406 /* The AAPCS requires at most doubleword alignment. */
4407 if (align > ARM_INT_REGISTER_SIZE * 2)
4408 align = ARM_INT_REGISTER_SIZE * 2;
4409 }
4410
4411 if (use_vfp_abi
4412 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4413 &vfp_base_count))
4414 {
4415 int regno;
4416 int unit_length;
4417 int shift;
4418 unsigned mask;
4419
4420 /* Because this is a CPRC it cannot go in a core register or
4421 cause a core register to be skipped for alignment.
4422 Either it goes in VFP registers and the rest of this loop
4423 iteration is skipped for this argument, or it goes on the
4424 stack (and the stack alignment code is correct for this
4425 case). */
4426 may_use_core_reg = 0;
4427
4428 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4429 shift = unit_length / 4;
4430 mask = (1 << (shift * vfp_base_count)) - 1;
4431 for (regno = 0; regno < 16; regno += shift)
4432 if (((vfp_regs_free >> regno) & mask) == mask)
4433 break;
4434
4435 if (regno < 16)
4436 {
4437 int reg_char;
4438 int reg_scaled;
4439 int i;
4440
4441 vfp_regs_free &= ~(mask << regno);
4442 reg_scaled = regno / shift;
4443 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4444 for (i = 0; i < vfp_base_count; i++)
4445 {
4446 char name_buf[4];
4447 int regnum;
4448 if (reg_char == 'q')
4449 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4450 val + i * unit_length);
4451 else
4452 {
4453 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4454 reg_char, reg_scaled + i);
4455 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4456 strlen (name_buf));
4457 regcache->cooked_write (regnum, val + i * unit_length);
4458 }
4459 }
4460 continue;
4461 }
4462 else
4463 {
4464 /* This CPRC could not go in VFP registers, so all VFP
4465 registers are now marked as used. */
4466 vfp_regs_free = 0;
4467 }
4468 }
4469
4470 /* Push stack padding for doubleword alignment. */
4471 if (nstack & (align - 1))
4472 {
4473 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4474 nstack += ARM_INT_REGISTER_SIZE;
4475 }
4476
4477 /* Doubleword aligned quantities must go in even register pairs. */
4478 if (may_use_core_reg
4479 && argreg <= ARM_LAST_ARG_REGNUM
4480 && align > ARM_INT_REGISTER_SIZE
4481 && argreg & 1)
4482 argreg++;
4483
4484 /* If the argument is a pointer to a function, and it is a
4485 Thumb function, create a LOCAL copy of the value and set
4486 the THUMB bit in it. */
4487 if (TYPE_CODE_PTR == typecode
4488 && target_type != NULL
4489 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4490 {
4491 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4492 if (arm_pc_is_thumb (gdbarch, regval))
4493 {
4494 bfd_byte *copy = (bfd_byte *) alloca (len);
4495 store_unsigned_integer (copy, len, byte_order,
4496 MAKE_THUMB_ADDR (regval));
4497 val = copy;
4498 }
4499 }
4500
4501 /* Copy the argument to general registers or the stack in
4502 register-sized pieces. Large arguments are split between
4503 registers and stack. */
4504 while (len > 0)
4505 {
4506 int partial_len = len < ARM_INT_REGISTER_SIZE
4507 ? len : ARM_INT_REGISTER_SIZE;
4508 CORE_ADDR regval
4509 = extract_unsigned_integer (val, partial_len, byte_order);
4510
4511 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4512 {
4513 /* The argument is being passed in a general purpose
4514 register. */
4515 if (byte_order == BFD_ENDIAN_BIG)
4516 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4517
4518 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4519 gdbarch_register_name (gdbarch, argreg),
4520 phex (regval, ARM_INT_REGISTER_SIZE));
4521
4522 regcache_cooked_write_unsigned (regcache, argreg, regval);
4523 argreg++;
4524 }
4525 else
4526 {
4527 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4528
4529 memset (buf, 0, sizeof (buf));
4530 store_unsigned_integer (buf, partial_len, byte_order, regval);
4531
4532 /* Push the arguments onto the stack. */
4533 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4534 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4535 nstack += ARM_INT_REGISTER_SIZE;
4536 }
4537
4538 len -= partial_len;
4539 val += partial_len;
4540 }
4541 }
4542 /* If we have an odd number of words to push, then decrement the stack
4543 by one word now, so that the first stack argument is doubleword aligned. */
4544 if (nstack & 4)
4545 sp -= 4;
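/* For instance, if SP is 8-byte aligned on entry and NSTACK is 12 (three
   words of stack arguments), dropping SP by four here means the loop
   below leaves SP at its entry value minus 16, so the lowest stack
   argument is still doubleword aligned.  */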
4546
4547 while (si)
4548 {
4549 sp -= si->len;
4550 write_memory (sp, si->data, si->len);
4551 si = pop_stack_item (si);
4552 }
4553
4554 /* Finally, update the SP register. */
4555 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4556
4557 return sp;
4558 }
4559
4560
4561 /* Always align the frame to an 8-byte boundary. This is required on
4562 some platforms and harmless on the rest. */
4563
4564 static CORE_ADDR
4565 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4566 {
4567 /* Align the stack to eight bytes. */
4568 return sp & ~ (CORE_ADDR) 7;
4569 }
4570
4571 static void
4572 print_fpu_flags (struct ui_file *file, int flags)
4573 {
4574 if (flags & (1 << 0))
4575 gdb_puts ("IVO ", file);
4576 if (flags & (1 << 1))
4577 gdb_puts ("DVZ ", file);
4578 if (flags & (1 << 2))
4579 gdb_puts ("OFL ", file);
4580 if (flags & (1 << 3))
4581 gdb_puts ("UFL ", file);
4582 if (flags & (1 << 4))
4583 gdb_puts ("INX ", file);
4584 gdb_putc ('\n', file);
4585 }
4586
4587 /* Print interesting information about the floating point processor
4588 (if present) or emulator. */
4589 static void
4590 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4591 struct frame_info *frame, const char *args)
4592 {
4593 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4594 int type;
4595
4596 type = (status >> 24) & 127;
4597 if (status & (1 << 31))
4598 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4599 else
4600 gdb_printf (file, _("Software FPU type %d\n"), type);
4601 /* i18n: [floating point unit] mask */
4602 gdb_puts (_("mask: "), file);
4603 print_fpu_flags (file, status >> 16);
4604 /* i18n: [floating point unit] flags */
4605 gdb_puts (_("flags: "), file);
4606 print_fpu_flags (file, status);
4607 }
4608
4609 /* Construct the ARM extended floating point type. */
4610 static struct type *
4611 arm_ext_type (struct gdbarch *gdbarch)
4612 {
4613 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4614
4615 if (!tdep->arm_ext_type)
4616 tdep->arm_ext_type
4617 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4618 floatformats_arm_ext);
4619
4620 return tdep->arm_ext_type;
4621 }
4622
4623 static struct type *
4624 arm_neon_double_type (struct gdbarch *gdbarch)
4625 {
4626 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4627
4628 if (tdep->neon_double_type == NULL)
4629 {
4630 struct type *t, *elem;
4631
4632 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4633 TYPE_CODE_UNION);
4634 elem = builtin_type (gdbarch)->builtin_uint8;
4635 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4636 elem = builtin_type (gdbarch)->builtin_uint16;
4637 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4638 elem = builtin_type (gdbarch)->builtin_uint32;
4639 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4640 elem = builtin_type (gdbarch)->builtin_uint64;
4641 append_composite_type_field (t, "u64", elem);
4642 elem = builtin_type (gdbarch)->builtin_float;
4643 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4644 elem = builtin_type (gdbarch)->builtin_double;
4645 append_composite_type_field (t, "f64", elem);
4646
4647 t->set_is_vector (true);
4648 t->set_name ("neon_d");
4649 tdep->neon_double_type = t;
4650 }
4651
4652 return tdep->neon_double_type;
4653 }
4654
4655 /* FIXME: The vector types are not correctly ordered on big-endian
4656 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4657 bits of d0 - regardless of what unit size is being held in d0. So
4658 the offset of the first uint8 in d0 is 7, but the offset of the
4659 first float is 4. This code works as-is for little-endian
4660 targets. */
4661
4662 static struct type *
4663 arm_neon_quad_type (struct gdbarch *gdbarch)
4664 {
4665 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4666
4667 if (tdep->neon_quad_type == NULL)
4668 {
4669 struct type *t, *elem;
4670
4671 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4672 TYPE_CODE_UNION);
4673 elem = builtin_type (gdbarch)->builtin_uint8;
4674 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4675 elem = builtin_type (gdbarch)->builtin_uint16;
4676 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4677 elem = builtin_type (gdbarch)->builtin_uint32;
4678 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4679 elem = builtin_type (gdbarch)->builtin_uint64;
4680 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4681 elem = builtin_type (gdbarch)->builtin_float;
4682 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4683 elem = builtin_type (gdbarch)->builtin_double;
4684 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4685
4686 t->set_is_vector (true);
4687 t->set_name ("neon_q");
4688 tdep->neon_quad_type = t;
4689 }
4690
4691 return tdep->neon_quad_type;
4692 }
4693
4694 /* Return true if REGNUM is a Q pseudo register. Return false
4695 otherwise.
4696
4697 REGNUM is the raw register number and not a pseudo-relative register
4698 number. */
4699
4700 static bool
4701 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4702 {
4703 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4704
4705 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4706 MVE (Q0~Q7) features. */
4707 if (tdep->have_q_pseudos
4708 && regnum >= tdep->q_pseudo_base
4709 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4710 return true;
4711
4712 return false;
4713 }
4714
4715 /* Return true if REGNUM is a VFP S pseudo register. Return false
4716 otherwise.
4717
4718 REGNUM is the raw register number and not a pseudo-relative register
4719 number. */
4720
4721 static bool
4722 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4723 {
4724 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4725
4726 if (tdep->have_s_pseudos
4727 && regnum >= tdep->s_pseudo_base
4728 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4729 return true;
4730
4731 return false;
4732 }
4733
4734 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4735 otherwise.
4736
4737 REGNUM is the raw register number and not a pseudo-relative register
4738 number. */
4739
4740 static bool
4741 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
4742 {
4743 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4744
4745 if (tdep->have_mve
4746 && regnum >= tdep->mve_pseudo_base
4747 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
4748 return true;
4749
4750 return false;
4751 }
4752
4753 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
4754 false otherwise.
4755
4756 REGNUM is the raw register number and not a pseudo-relative register
4757 number. */
4758
4759 static bool
4760 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
4761 {
4762 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4763
4764 if (tdep->have_pacbti
4765 && regnum >= tdep->pacbti_pseudo_base
4766 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
4767 return true;
4768
4769 return false;
4770 }
4771
4772 /* Return the GDB type object for the "standard" data type of data in
4773 register N. */
4774
4775 static struct type *
4776 arm_register_type (struct gdbarch *gdbarch, int regnum)
4777 {
4778 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4779
4780 if (is_s_pseudo (gdbarch, regnum))
4781 return builtin_type (gdbarch)->builtin_float;
4782
4783 if (is_q_pseudo (gdbarch, regnum))
4784 return arm_neon_quad_type (gdbarch);
4785
4786 if (is_mve_pseudo (gdbarch, regnum))
4787 return builtin_type (gdbarch)->builtin_int16;
4788
4789 if (is_pacbti_pseudo (gdbarch, regnum))
4790 return builtin_type (gdbarch)->builtin_uint32;
4791
4792 /* If the target description has register information, we are only
4793 in this function so that we can override the types of
4794 double-precision registers for NEON. */
4795 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4796 {
4797 struct type *t = tdesc_register_type (gdbarch, regnum);
4798
4799 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4800 && t->code () == TYPE_CODE_FLT
4801 && tdep->have_neon)
4802 return arm_neon_double_type (gdbarch);
4803 else
4804 return t;
4805 }
4806
4807 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4808 {
4809 if (!tdep->have_fpa_registers)
4810 return builtin_type (gdbarch)->builtin_void;
4811
4812 return arm_ext_type (gdbarch);
4813 }
4814 else if (regnum == ARM_SP_REGNUM)
4815 return builtin_type (gdbarch)->builtin_data_ptr;
4816 else if (regnum == ARM_PC_REGNUM)
4817 return builtin_type (gdbarch)->builtin_func_ptr;
4818 else if (regnum >= ARRAY_SIZE (arm_register_names))
4819 /* These registers are only supported on targets which supply
4820 an XML description. */
4821 return builtin_type (gdbarch)->builtin_int0;
4822 else
4823 return builtin_type (gdbarch)->builtin_uint32;
4824 }
4825
4826 /* Map a DWARF register REGNUM onto the appropriate GDB register
4827 number. */
4828
4829 static int
4830 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4831 {
4832 /* Core integer regs. */
4833 if (reg >= 0 && reg <= 15)
4834 return reg;
4835
4836 /* Legacy FPA encoding. These were once used in a way which
4837 overlapped with VFP register numbering, so their use is
4838 discouraged, but GDB doesn't support the ARM toolchain
4839 which used them for VFP. */
4840 if (reg >= 16 && reg <= 23)
4841 return ARM_F0_REGNUM + reg - 16;
4842
4843 /* New assignments for the FPA registers. */
4844 if (reg >= 96 && reg <= 103)
4845 return ARM_F0_REGNUM + reg - 96;
4846
4847 /* WMMX register assignments. */
4848 if (reg >= 104 && reg <= 111)
4849 return ARM_WCGR0_REGNUM + reg - 104;
4850
4851 if (reg >= 112 && reg <= 127)
4852 return ARM_WR0_REGNUM + reg - 112;
4853
4854 /* PACBTI register containing the Pointer Authentication Code. */
4855 if (reg == ARM_DWARF_RA_AUTH_CODE)
4856 {
4857 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4858
4859 if (tdep->have_pacbti)
4860 return tdep->pacbti_pseudo_base;
4861
4862 return -1;
4863 }
4864
4865 if (reg >= 192 && reg <= 199)
4866 return ARM_WC0_REGNUM + reg - 192;
4867
4868 /* VFP v2 registers. A double precision value is actually
4869 in d1 rather than s2, but the ABI only defines numbering
4870 for the single precision registers. This will "just work"
4871 in GDB for little endian targets (we'll read eight bytes,
4872 starting in s0 and then progressing to s1), but will be
4873 reversed on big endian targets with VFP. This won't
4874 be a problem for the new Neon quad registers; you're supposed
4875 to use DW_OP_piece for those. */
4876 if (reg >= 64 && reg <= 95)
4877 {
4878 char name_buf[4];
4879
4880 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4881 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4882 strlen (name_buf));
4883 }
4884
4885 /* VFP v3 / Neon registers. This range is also used for VFP v2
4886 registers, except that it now describes d0 instead of s0. */
4887 if (reg >= 256 && reg <= 287)
4888 {
4889 char name_buf[4];
4890
4891 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4892 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4893 strlen (name_buf));
4894 }
4895
4896 return -1;
4897 }
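/* Some example mappings implemented above: DWARF registers 0-15 map
   directly to r0-r15, 64 maps to the single-precision register "s0",
   100 maps to the FPA register f4, and 256 maps to the VFP v3 / Neon
   double register "d0".  */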
4898
4899 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4900 static int
4901 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4902 {
4903 int reg = regnum;
4904 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4905
4906 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4907 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4908
4909 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4910 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4911
4912 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4913 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4914
4915 if (reg < NUM_GREGS)
4916 return SIM_ARM_R0_REGNUM + reg;
4917 reg -= NUM_GREGS;
4918
4919 if (reg < NUM_FREGS)
4920 return SIM_ARM_FP0_REGNUM + reg;
4921 reg -= NUM_FREGS;
4922
4923 if (reg < NUM_SREGS)
4924 return SIM_ARM_FPS_REGNUM + reg;
4925 reg -= NUM_SREGS;
4926
4927 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4928 }
4929
4930 static const unsigned char op_lit0 = DW_OP_lit0;
4931
4932 static void
4933 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
4934 struct dwarf2_frame_state_reg *reg,
4935 struct frame_info *this_frame)
4936 {
4937 if (is_pacbti_pseudo (gdbarch, regnum))
4938 {
4939 /* Initialize RA_AUTH_CODE to zero. */
4940 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
4941 reg->loc.exp.start = &op_lit0;
4942 reg->loc.exp.len = 1;
4943 return;
4944 }
4945
4946 switch (regnum)
4947 {
4948 case ARM_PC_REGNUM:
4949 case ARM_PS_REGNUM:
4950 reg->how = DWARF2_FRAME_REG_FN;
4951 reg->loc.fn = arm_dwarf2_prev_register;
4952 break;
4953 case ARM_SP_REGNUM:
4954 reg->how = DWARF2_FRAME_REG_CFA;
4955 break;
4956 }
4957 }
4958
4959 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4960 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4961 NULL if an error occurs. BUF is freed. */
4962
4963 static gdb_byte *
4964 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4965 int old_len, int new_len)
4966 {
4967 gdb_byte *new_buf;
4968 int bytes_to_read = new_len - old_len;
4969
4970 new_buf = (gdb_byte *) xmalloc (new_len);
4971 memcpy (new_buf + bytes_to_read, buf, old_len);
4972 xfree (buf);
4973 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4974 {
4975 xfree (new_buf);
4976 return NULL;
4977 }
4978 return new_buf;
4979 }
4980
4981 /* An IT block is at most the 2-byte IT instruction followed by four
4982 4-byte instructions.  The instruction at the breakpoint is itself one of
4983 those four, so at most the IT instruction plus three 4-byte instructions
4984 can precede it: the furthest back we must search is 2 + 3 * 4 == 14 bytes.  */
4985 #define MAX_IT_BLOCK_PREFIX 14
4986
4987 /* Use a quick scan if there are more than this many bytes of
4988 code. */
4989 #define IT_SCAN_THRESHOLD 32
4990
4991 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4992 A breakpoint in an IT block may not be hit, depending on the
4993 condition flags. */
4994 static CORE_ADDR
4995 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4996 {
4997 gdb_byte *buf;
4998 char map_type;
4999 CORE_ADDR boundary, func_start;
5000 int buf_len;
5001 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5002 int i, any, last_it, last_it_count;
5003 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
5004
5005 /* If we are using BKPT breakpoints, none of this is necessary. */
5006 if (tdep->thumb2_breakpoint == NULL)
5007 return bpaddr;
5008
5009 /* ARM mode does not have this problem. */
5010 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5011 return bpaddr;
5012
5013 /* We are setting a breakpoint in Thumb code that could potentially
5014 contain an IT block. The first step is to find how much Thumb
5015 code there is; we do not need to read outside of known Thumb
5016 sequences. */
5017 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5018 if (map_type == 0)
5019 /* Thumb-2 code must have mapping symbols to have a chance. */
5020 return bpaddr;
5021
5022 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5023
5024 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5025 && func_start > boundary)
5026 boundary = func_start;
5027
5028 /* Search for a candidate IT instruction. We have to do some fancy
5029 footwork to distinguish a real IT instruction from the second
5030 half of a 32-bit instruction, but there is no need for that if
5031 there's no candidate. */
5032 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5033 if (buf_len == 0)
5034 /* No room for an IT instruction. */
5035 return bpaddr;
5036
5037 buf = (gdb_byte *) xmalloc (buf_len);
5038 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5039 return bpaddr;
5040 any = 0;
5041 for (i = 0; i < buf_len; i += 2)
5042 {
5043 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5044 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5045 {
5046 any = 1;
5047 break;
5048 }
5049 }
5050
5051 if (any == 0)
5052 {
5053 xfree (buf);
5054 return bpaddr;
5055 }
5056
5057 /* OK, the code bytes before this instruction contain at least one
5058 halfword which resembles an IT instruction. We know that it's
5059 Thumb code, but there are still two possibilities. Either the
5060 halfword really is an IT instruction, or it is the second half of
5061 a 32-bit Thumb instruction. The only way we can tell is to
5062 scan forwards from a known instruction boundary. */
5063 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5064 {
5065 int definite;
5066
5067 /* There's a lot of code before this instruction. Start with an
5068 optimistic search; it's easy to recognize halfwords that cannot
5069 be the start of a 32-bit instruction, and use that to
5070 lock on to the instruction boundaries. */
5071 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5072 if (buf == NULL)
5073 return bpaddr;
5074 buf_len = IT_SCAN_THRESHOLD;
5075
5076 definite = 0;
5077 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5078 {
5079 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5080 if (thumb_insn_size (inst1) == 2)
5081 {
5082 definite = 1;
5083 break;
5084 }
5085 }
5086
5087 /* At this point, if DEFINITE, BUF[I] is the first place we
5088 are sure that we know the instruction boundaries, and it is far
5089 enough from BPADDR that we could not miss an IT instruction
5090 affecting BPADDR. If ! DEFINITE, give up - start from a
5091 known boundary. */
5092 if (! definite)
5093 {
5094 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5095 bpaddr - boundary);
5096 if (buf == NULL)
5097 return bpaddr;
5098 buf_len = bpaddr - boundary;
5099 i = 0;
5100 }
5101 }
5102 else
5103 {
5104 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5105 if (buf == NULL)
5106 return bpaddr;
5107 buf_len = bpaddr - boundary;
5108 i = 0;
5109 }
5110
5111 /* Scan forwards. Find the last IT instruction before BPADDR. */
5112 last_it = -1;
5113 last_it_count = 0;
5114 while (i < buf_len)
5115 {
5116 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5117 last_it_count--;
5118 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5119 {
5120 last_it = i;
5121 if (inst1 & 0x0001)
5122 last_it_count = 4;
5123 else if (inst1 & 0x0002)
5124 last_it_count = 3;
5125 else if (inst1 & 0x0004)
5126 last_it_count = 2;
5127 else
5128 last_it_count = 1;
5129 }
5130 i += thumb_insn_size (inst1);
5131 }
5132
5133 xfree (buf);
5134
5135 if (last_it == -1)
5136 /* There wasn't really an IT instruction after all. */
5137 return bpaddr;
5138
5139 if (last_it_count < 1)
5140 /* It was too far away. */
5141 return bpaddr;
5142
5143 /* This really is a trouble spot. Move the breakpoint to the IT
5144 instruction. */
5145 return bpaddr - buf_len + last_it;
5146 }
5147
5148 /* ARM displaced stepping support.
5149
5150 Generally ARM displaced stepping works as follows:
5151
5152 1. When an instruction is to be single-stepped, it is first decoded by
5153 arm_process_displaced_insn. Depending on the type of instruction, it is
5154 then copied to a scratch location, possibly in a modified form. The
5155 copy_* set of functions performs such modification, as necessary. A
5156 breakpoint is placed after the modified instruction in the scratch space
5157 to return control to GDB. Note in particular that instructions which
5158 modify the PC will no longer do so after modification.
5159
5160 2. The instruction is single-stepped, by setting the PC to the scratch
5161 location address, and resuming. Control returns to GDB when the
5162 breakpoint is hit.
5163
5164 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5165 function used for the current instruction. This function's job is to
5166 put the CPU/memory state back to what it would have been if the
5167 instruction had been executed unmodified in its original location. */
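/* As a concrete illustration, using the preload handling further below:
   to step over "pld [pc, #4]" located at ADDR, arm_copy_preload stashes
   r0, loads r0 with the value the PC would have read at the original
   location (ADDR + 8 in ARM state), emits "pld [r0, #4]" into the
   scratch area, single-steps it there, and cleanup_preload then
   restores r0.  */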
5168
5169 /* NOP instruction (mov r0, r0). */
5170 #define ARM_NOP 0xe1a00000
5171 #define THUMB_NOP 0x4600
5172
5173 /* Helper for register reads for displaced stepping. In particular, this
5174 returns the PC as it would be seen by the instruction at its original
5175 location. */
5176
5177 ULONGEST
5178 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5179 int regno)
5180 {
5181 ULONGEST ret;
5182 CORE_ADDR from = dsc->insn_addr;
5183
5184 if (regno == ARM_PC_REGNUM)
5185 {
5186 /* Compute pipeline offset:
5187 - When executing an ARM instruction, PC reads as the address of the
5188 current instruction plus 8.
5189 - When executing a Thumb instruction, PC reads as the address of the
5190 current instruction plus 4. */
5191
5192 if (!dsc->is_thumb)
5193 from += 8;
5194 else
5195 from += 4;
5196
5197 displaced_debug_printf ("read pc value %.8lx",
5198 (unsigned long) from);
5199 return (ULONGEST) from;
5200 }
5201 else
5202 {
5203 regcache_cooked_read_unsigned (regs, regno, &ret);
5204
5205 displaced_debug_printf ("read r%d value %.8lx",
5206 regno, (unsigned long) ret);
5207
5208 return ret;
5209 }
5210 }
5211
5212 static int
5213 displaced_in_arm_mode (struct regcache *regs)
5214 {
5215 ULONGEST ps;
5216 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5217
5218 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5219
5220 return (ps & t_bit) == 0;
5221 }
5222
5223 /* Write to the PC as from a branch instruction. */
5224
5225 static void
5226 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5227 ULONGEST val)
5228 {
5229 if (!dsc->is_thumb)
5230 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5231 architecture versions < 6. */
5232 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5233 val & ~(ULONGEST) 0x3);
5234 else
5235 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5236 val & ~(ULONGEST) 0x1);
5237 }
5238
5239 /* Write to the PC as from a branch-exchange instruction. */
5240
5241 static void
5242 bx_write_pc (struct regcache *regs, ULONGEST val)
5243 {
5244 ULONGEST ps;
5245 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5246
5247 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5248
5249 if ((val & 1) == 1)
5250 {
5251 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5252 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5253 }
5254 else if ((val & 2) == 0)
5255 {
5256 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5257 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5258 }
5259 else
5260 {
5261 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5262 mode, align dest to 4 bytes). */
5263 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5264 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5265 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5266 }
5267 }
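/* For example, bx_write_pc (regs, 0x8001) sets the CPSR T bit and writes
   PC = 0x8000 (Thumb state); bx_write_pc (regs, 0x8000) clears the T bit
   and writes PC = 0x8000 (ARM state); and bx_write_pc (regs, 0x8002)
   takes the unpredictable path: it warns, forces ARM state and writes
   PC = 0x8000.  */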
5268
5269 /* Write to the PC as if from a load instruction. */
5270
5271 static void
5272 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5273 ULONGEST val)
5274 {
5275 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5276 bx_write_pc (regs, val);
5277 else
5278 branch_write_pc (regs, dsc, val);
5279 }
5280
5281 /* Write to the PC as if from an ALU instruction. */
5282
5283 static void
5284 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5285 ULONGEST val)
5286 {
5287 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5288 bx_write_pc (regs, val);
5289 else
5290 branch_write_pc (regs, dsc, val);
5291 }
5292
5293 /* Helper for writing to registers for displaced stepping. Writing to the PC
5294 has varying effects depending on the instruction which does the write:
5295 this is controlled by the WRITE_PC argument. */
5296
5297 void
5298 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5299 int regno, ULONGEST val, enum pc_write_style write_pc)
5300 {
5301 if (regno == ARM_PC_REGNUM)
5302 {
5303 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5304
5305 switch (write_pc)
5306 {
5307 case BRANCH_WRITE_PC:
5308 branch_write_pc (regs, dsc, val);
5309 break;
5310
5311 case BX_WRITE_PC:
5312 bx_write_pc (regs, val);
5313 break;
5314
5315 case LOAD_WRITE_PC:
5316 load_write_pc (regs, dsc, val);
5317 break;
5318
5319 case ALU_WRITE_PC:
5320 alu_write_pc (regs, dsc, val);
5321 break;
5322
5323 case CANNOT_WRITE_PC:
5324 warning (_("Instruction wrote to PC in an unexpected way when "
5325 "single-stepping"));
5326 break;
5327
5328 default:
5329 internal_error (__FILE__, __LINE__,
5330 _("Invalid argument to displaced_write_reg"));
5331 }
5332
5333 dsc->wrote_to_pc = 1;
5334 }
5335 else
5336 {
5337 displaced_debug_printf ("writing r%d value %.8lx",
5338 regno, (unsigned long) val);
5339 regcache_cooked_write_unsigned (regs, regno, val);
5340 }
5341 }
5342
5343 /* This function is used to concisely determine if an instruction INSN
5344 references PC. Register fields of interest in INSN should have the
5345 corresponding fields of BITMASK set to 0b1111. The function
5346 returns 1 if any of these fields in INSN reference the PC
5347 (also 0b1111, r15), else it returns 0. */
5348
5349 static int
5350 insn_references_pc (uint32_t insn, uint32_t bitmask)
5351 {
5352 uint32_t lowbit = 1;
5353
5354 while (bitmask != 0)
5355 {
5356 uint32_t mask;
5357
5358 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5359 ;
5360
5361 if (!lowbit)
5362 break;
5363
5364 mask = lowbit * 0xf;
5365
5366 if ((insn & mask) == mask)
5367 return 1;
5368
5369 bitmask &= ~mask;
5370 }
5371
5372 return 0;
5373 }
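/* For example, arm_copy_preload below passes a BITMASK of 0x000f0000 to
   test only the Rn field (bits 16-19): for "pld [pc, #8]" that field is
   0b1111 and the function returns 1, while for "pld [r3, #8]" it is
   0b0011 and the function returns 0.  */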
5374
5375 /* The simplest copy function. Many instructions have the same effect no
5376 matter what address they are executed at: in those cases, use this. */
5377
5378 static int
5379 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5380 arm_displaced_step_copy_insn_closure *dsc)
5381 {
5382 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5383 (unsigned long) insn, iname);
5384
5385 dsc->modinsn[0] = insn;
5386
5387 return 0;
5388 }
5389
5390 static int
5391 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5392 uint16_t insn2, const char *iname,
5393 arm_displaced_step_copy_insn_closure *dsc)
5394 {
5395 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5396 "unmodified", insn1, insn2, iname);
5397
5398 dsc->modinsn[0] = insn1;
5399 dsc->modinsn[1] = insn2;
5400 dsc->numinsns = 2;
5401
5402 return 0;
5403 }
5404
5405 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5406 modification. */
5407 static int
5408 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5409 const char *iname,
5410 arm_displaced_step_copy_insn_closure *dsc)
5411 {
5412 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5413 insn, iname);
5414
5415 dsc->modinsn[0] = insn;
5416
5417 return 0;
5418 }
5419
5420 /* Preload instructions with immediate offset. */
5421
5422 static void
5423 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5424 arm_displaced_step_copy_insn_closure *dsc)
5425 {
5426 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5427 if (!dsc->u.preload.immed)
5428 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5429 }
5430
5431 static void
5432 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5433 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5434 {
5435 ULONGEST rn_val;
5436 /* Preload instructions:
5437
5438 {pli/pld} [rn, #+/-imm]
5439 ->
5440 {pli/pld} [r0, #+/-imm]. */
5441
5442 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5443 rn_val = displaced_read_reg (regs, dsc, rn);
5444 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5445 dsc->u.preload.immed = 1;
5446
5447 dsc->cleanup = &cleanup_preload;
5448 }
5449
5450 static int
5451 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5452 arm_displaced_step_copy_insn_closure *dsc)
5453 {
5454 unsigned int rn = bits (insn, 16, 19);
5455
5456 if (!insn_references_pc (insn, 0x000f0000ul))
5457 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5458
5459 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5460
5461 dsc->modinsn[0] = insn & 0xfff0ffff;
5462
5463 install_preload (gdbarch, regs, dsc, rn);
5464
5465 return 0;
5466 }
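
/* Worked example (illustrative): for "pld [pc, #imm]" the mask 0xfff0ffff
   above clears the Rn field (bits 16-19), so the copied instruction becomes
   "pld [r0, #imm]"; install_preload saves r0, loads it with the original PC
   value, and registers cleanup_preload to restore r0 afterwards.  */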
5467
5468 static int
5469 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5470 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5471 {
5472 unsigned int rn = bits (insn1, 0, 3);
5473 unsigned int u_bit = bit (insn1, 7);
5474 int imm12 = bits (insn2, 0, 11);
5475 ULONGEST pc_val;
5476
5477 if (rn != ARM_PC_REGNUM)
5478 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5479
5480 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
5481 PLD (literal) Encoding T1. */
5482 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5483 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5484 imm12);
5485
5486 if (!u_bit)
5487 imm12 = -1 * imm12;
5488
5489 /* Rewrite instruction {pli/pld} PC imm12 into:
5490 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5491
5492 {pli/pld} [r0, r1]
5493
5494 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5495
5496 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5497 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5498
5499 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5500
5501 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5502 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5503 dsc->u.preload.immed = 0;
5504
5505 /* {pli/pld} [r0, r1] */
5506 dsc->modinsn[0] = insn1 & 0xfff0;
5507 dsc->modinsn[1] = 0xf001;
5508 dsc->numinsns = 2;
5509
5510 dsc->cleanup = &cleanup_preload;
5511 return 0;
5512 }
5513
5514 /* Preload instructions with register offset. */
5515
5516 static void
5517 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5518 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5519 unsigned int rm)
5520 {
5521 ULONGEST rn_val, rm_val;
5522
5523 /* Preload register-offset instructions:
5524
5525 {pli/pld} [rn, rm {, shift}]
5526 ->
5527 {pli/pld} [r0, r1 {, shift}]. */
5528
5529 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5530 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5531 rn_val = displaced_read_reg (regs, dsc, rn);
5532 rm_val = displaced_read_reg (regs, dsc, rm);
5533 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5534 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5535 dsc->u.preload.immed = 0;
5536
5537 dsc->cleanup = &cleanup_preload;
5538 }
5539
5540 static int
5541 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5542 struct regcache *regs,
5543 arm_displaced_step_copy_insn_closure *dsc)
5544 {
5545 unsigned int rn = bits (insn, 16, 19);
5546 unsigned int rm = bits (insn, 0, 3);
5547
5549 if (!insn_references_pc (insn, 0x000f000ful))
5550 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5551
5552 displaced_debug_printf ("copying preload insn %.8lx",
5553 (unsigned long) insn);
5554
5555 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5556
5557 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5558 return 0;
5559 }
5560
5561 /* Copy/cleanup coprocessor load and store instructions. */
5562
5563 static void
5564 cleanup_copro_load_store (struct gdbarch *gdbarch,
5565 struct regcache *regs,
5566 arm_displaced_step_copy_insn_closure *dsc)
5567 {
5568 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5569
5570 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5571
5572 if (dsc->u.ldst.writeback)
5573 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5574 }
5575
5576 static void
5577 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5578 arm_displaced_step_copy_insn_closure *dsc,
5579 int writeback, unsigned int rn)
5580 {
5581 ULONGEST rn_val;
5582
5583 /* Coprocessor load/store instructions:
5584
5585 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5586 ->
5587 {stc/stc2} [r0, #+/-imm].
5588
5589 ldc/ldc2 are handled identically. */
5590
5591 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5592 rn_val = displaced_read_reg (regs, dsc, rn);
5593 /* PC should be 4-byte aligned. */
5594 rn_val = rn_val & 0xfffffffc;
5595 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5596
5597 dsc->u.ldst.writeback = writeback;
5598 dsc->u.ldst.rn = rn;
5599
5600 dsc->cleanup = &cleanup_copro_load_store;
5601 }
5602
5603 static int
5604 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5605 struct regcache *regs,
5606 arm_displaced_step_copy_insn_closure *dsc)
5607 {
5608 unsigned int rn = bits (insn, 16, 19);
5609
5610 if (!insn_references_pc (insn, 0x000f0000ul))
5611 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5612
5613 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5614 (unsigned long) insn);
5615
5616 dsc->modinsn[0] = insn & 0xfff0ffff;
5617
5618 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5619
5620 return 0;
5621 }
5622
5623 static int
5624 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5625 uint16_t insn2, struct regcache *regs,
5626 arm_displaced_step_copy_insn_closure *dsc)
5627 {
5628 unsigned int rn = bits (insn1, 0, 3);
5629
5630 if (rn != ARM_PC_REGNUM)
5631 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5632 "copro load/store", dsc);
5633
5634 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5635 insn1, insn2);
5636
5637 dsc->modinsn[0] = insn1 & 0xfff0;
5638 dsc->modinsn[1] = insn2;
5639 dsc->numinsns = 2;
5640
5641 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
5642 don't support writeback, so pass 0. */
5643 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5644
5645 return 0;
5646 }
5647
5648 /* Clean up branch instructions (actually perform the branch, by setting
5649 PC). */
5650
5651 static void
5652 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5653 arm_displaced_step_copy_insn_closure *dsc)
5654 {
5655 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5656 int branch_taken = condition_true (dsc->u.branch.cond, status);
5657 enum pc_write_style write_pc = dsc->u.branch.exchange
5658 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5659
5660 if (!branch_taken)
5661 return;
5662
5663 if (dsc->u.branch.link)
5664 {
5665 /* The value of LR should be the address of the insn following the
5666 current one.  So as not to confuse later handling of a `bx lr' insn,
5667 if the current insn is Thumb, bit 0 of the LR value should be set to 1.  */
5668 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5669
5670 if (dsc->is_thumb)
5671 next_insn_addr |= 0x1;
5672
5673 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5674 CANNOT_WRITE_PC);
5675 }
5676
5677 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5678 }
5679
5680 /* Copy B/BL/BLX instructions with immediate destinations. */
5681
5682 static void
5683 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5684 arm_displaced_step_copy_insn_closure *dsc,
5685 unsigned int cond, int exchange, int link, long offset)
5686 {
5687 /* Implement "BL<cond> <label>" as:
5688
5689 Preparation: cond <- instruction condition
5690 Insn: mov r0, r0 (nop)
5691 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5692
5693 B<cond> similar, but don't set r14 in cleanup. */
5694
5695 dsc->u.branch.cond = cond;
5696 dsc->u.branch.link = link;
5697 dsc->u.branch.exchange = exchange;
5698
5699 dsc->u.branch.dest = dsc->insn_addr;
5700 if (link && exchange)
5701 /* For BLX, the offset is computed from Align (PC, 4). */
5702 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5703
5704 if (dsc->is_thumb)
5705 dsc->u.branch.dest += 4 + offset;
5706 else
5707 dsc->u.branch.dest += 8 + offset;
5708
5709 dsc->cleanup = &cleanup_branch;
5710 }
5711 static int
5712 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5713 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5714 {
5715 unsigned int cond = bits (insn, 28, 31);
5716 int exchange = (cond == 0xf);
5717 int link = exchange || bit (insn, 24);
5718 long offset;
5719
5720 displaced_debug_printf ("copying %s immediate insn %.8lx",
5721 (exchange) ? "blx" : (link) ? "bl" : "b",
5722 (unsigned long) insn);
5723 if (exchange)
5724 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5725 then arrange the switch into Thumb mode. */
5726 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5727 else
5728 offset = bits (insn, 0, 23) << 2;
5729
5730 if (bit (offset, 25))
5731 offset = offset | ~0x3ffffff;
5732
5733 dsc->modinsn[0] = ARM_NOP;
5734
5735 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5736 return 0;
5737 }
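
/* Worked example (illustrative): an ARM-state "b ." (branch to self) has
   imm24 = 0xfffffe, so offset = 0xfffffe << 2 = 0x3fffff8; bit 25 is set, so
   the sign extension above turns that into -8, and install_b_bl_blx computes
   dest = insn_addr + 8 + (-8) = insn_addr, matching the fact that the
   ARM-state PC reads as the address of the instruction plus 8.  */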
5738
5739 static int
5740 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5741 uint16_t insn2, struct regcache *regs,
5742 arm_displaced_step_copy_insn_closure *dsc)
5743 {
5744 int link = bit (insn2, 14);
5745 int exchange = link && !bit (insn2, 12);
5746 int cond = INST_AL;
5747 long offset = 0;
5748 int j1 = bit (insn2, 13);
5749 int j2 = bit (insn2, 11);
5750 int s = sbits (insn1, 10, 10);
5751 int i1 = !(j1 ^ bit (insn1, 10));
5752 int i2 = !(j2 ^ bit (insn1, 10));
5753
5754 if (!link && !exchange) /* B */
5755 {
5756 offset = (bits (insn2, 0, 10) << 1);
5757 if (bit (insn2, 12)) /* Encoding T4 */
5758 {
5759 offset |= (bits (insn1, 0, 9) << 12)
5760 | (i2 << 22)
5761 | (i1 << 23)
5762 | (s << 24);
5763 cond = INST_AL;
5764 }
5765 else /* Encoding T3 */
5766 {
5767 offset |= (bits (insn1, 0, 5) << 12)
5768 | (j1 << 18)
5769 | (j2 << 19)
5770 | (s << 20);
5771 cond = bits (insn1, 6, 9);
5772 }
5773 }
5774 else
5775 {
5776 offset = (bits (insn1, 0, 9) << 12);
5777 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5778 offset |= exchange ?
5779 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5780 }
5781
5782 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5783 link ? (exchange) ? "blx" : "bl" : "b",
5784 insn1, insn2, offset);
5785
5786 dsc->modinsn[0] = THUMB_NOP;
5787
5788 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5789 return 0;
5790 }
5791
5792 /* Copy B Thumb instructions. */
5793 static int
5794 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5795 arm_displaced_step_copy_insn_closure *dsc)
5796 {
5797 unsigned int cond = 0;
5798 int offset = 0;
5799 unsigned short bit_12_15 = bits (insn, 12, 15);
5800 CORE_ADDR from = dsc->insn_addr;
5801
5802 if (bit_12_15 == 0xd)
5803 {
5804 /* Encoding T1: offset = SignExtend (imm8:'0', 32). */
5805 offset = sbits ((insn << 1), 0, 8);
5806 cond = bits (insn, 8, 11);
5807 }
5808 else if (bit_12_15 == 0xe) /* Encoding T2 */
5809 {
5810 offset = sbits ((insn << 1), 0, 11);
5811 cond = INST_AL;
5812 }
5813
5814 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5815 insn, offset);
5816
5817 dsc->u.branch.cond = cond;
5818 dsc->u.branch.link = 0;
5819 dsc->u.branch.exchange = 0;
5820 dsc->u.branch.dest = from + 4 + offset;
5821
5822 dsc->modinsn[0] = THUMB_NOP;
5823
5824 dsc->cleanup = &cleanup_branch;
5825
5826 return 0;
5827 }
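
/* Worked example (illustrative): the 16-bit instruction 0xd0fe is "beq ."
   (a conditional branch to itself): imm8 = 0xfe, so sbits ((insn << 1), 0, 8)
   sign-extends 0x1fc to -4 and dest = from + 4 + (-4) = from, consistent with
   the Thumb-state PC reading as the address of the instruction plus 4.  */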
5828
5829 /* Copy BX/BLX with register-specified destinations. */
5830
5831 static void
5832 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5833 arm_displaced_step_copy_insn_closure *dsc, int link,
5834 unsigned int cond, unsigned int rm)
5835 {
5836 /* Implement "{BX,BLX}<cond> <reg>" as:
5837
5838 Preparation: cond <- instruction condition
5839 Insn: mov r0, r0 (nop)
5840 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5841
5842 Don't set r14 in cleanup for BX. */
5843
5844 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5845
5846 dsc->u.branch.cond = cond;
5847 dsc->u.branch.link = link;
5848
5849 dsc->u.branch.exchange = 1;
5850
5851 dsc->cleanup = &cleanup_branch;
5852 }
5853
5854 static int
5855 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5856 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5857 {
5858 unsigned int cond = bits (insn, 28, 31);
5859 /* BX: x12xxx1x
5860 BLX: x12xxx3x. */
5861 int link = bit (insn, 5);
5862 unsigned int rm = bits (insn, 0, 3);
5863
5864 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5865
5866 dsc->modinsn[0] = ARM_NOP;
5867
5868 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5869 return 0;
5870 }
5871
5872 static int
5873 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5874 struct regcache *regs,
5875 arm_displaced_step_copy_insn_closure *dsc)
5876 {
5877 int link = bit (insn, 7);
5878 unsigned int rm = bits (insn, 3, 6);
5879
5880 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5881
5882 dsc->modinsn[0] = THUMB_NOP;
5883
5884 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5885
5886 return 0;
5887 }
5888
5889
5890 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5891
5892 static void
5893 cleanup_alu_imm (struct gdbarch *gdbarch,
5894 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5895 {
5896 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5897 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5898 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5899 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5900 }
5901
5902 static int
5903 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5904 arm_displaced_step_copy_insn_closure *dsc)
5905 {
5906 unsigned int rn = bits (insn, 16, 19);
5907 unsigned int rd = bits (insn, 12, 15);
5908 unsigned int op = bits (insn, 21, 24);
5909 int is_mov = (op == 0xd);
5910 ULONGEST rd_val, rn_val;
5911
5912 if (!insn_references_pc (insn, 0x000ff000ul))
5913 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5914
5915 displaced_debug_printf ("copying immediate %s insn %.8lx",
5916 is_mov ? "move" : "ALU",
5917 (unsigned long) insn);
5918
5919 /* Instruction is of form:
5920
5921 <op><cond> rd, [rn,] #imm
5922
5923 Rewrite as:
5924
5925 Preparation: tmp1, tmp2 <- r0, r1;
5926 r0, r1 <- rd, rn
5927 Insn: <op><cond> r0, r1, #imm
5928 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5929 */
5930
5931 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5932 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5933 rn_val = displaced_read_reg (regs, dsc, rn);
5934 rd_val = displaced_read_reg (regs, dsc, rd);
5935 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5936 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5937 dsc->rd = rd;
5938
5939 if (is_mov)
5940 dsc->modinsn[0] = insn & 0xfff00fff;
5941 else
5942 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5943
5944 dsc->cleanup = &cleanup_alu_imm;
5945
5946 return 0;
5947 }
5948
5949 static int
5950 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5951 uint16_t insn2, struct regcache *regs,
5952 arm_displaced_step_copy_insn_closure *dsc)
5953 {
5954 unsigned int op = bits (insn1, 5, 8);
5955 unsigned int rn, rm, rd;
5956 ULONGEST rd_val, rn_val;
5957
5958 rn = bits (insn1, 0, 3); /* Rn */
5959 rm = bits (insn2, 0, 3); /* Rm */
5960 rd = bits (insn2, 8, 11); /* Rd */
5961
5962 /* This routine is only called for the MOV instruction. */
5963 gdb_assert (op == 0x2 && rn == 0xf);
5964
5965 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5966 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5967
5968 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5969
5970 /* Instruction is of form:
5971
5972 <op><cond> rd, [rn,] #imm
5973
5974 Rewrite as:
5975
5976 Preparation: tmp1, tmp2 <- r0, r1;
5977 r0, r1 <- rd, rn
5978 Insn: <op><cond> r0, r1, #imm
5979 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5980 */
5981
5982 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5983 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5984 rn_val = displaced_read_reg (regs, dsc, rn);
5985 rd_val = displaced_read_reg (regs, dsc, rd);
5986 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5987 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5988 dsc->rd = rd;
5989
5990 dsc->modinsn[0] = insn1;
5991 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5992 dsc->numinsns = 2;
5993
5994 dsc->cleanup = &cleanup_alu_imm;
5995
5996 return 0;
5997 }
5998
5999 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6000
6001 static void
6002 cleanup_alu_reg (struct gdbarch *gdbarch,
6003 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6004 {
6005 ULONGEST rd_val;
6006 int i;
6007
6008 rd_val = displaced_read_reg (regs, dsc, 0);
6009
6010 for (i = 0; i < 3; i++)
6011 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6012
6013 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6014 }
6015
6016 static void
6017 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6018 arm_displaced_step_copy_insn_closure *dsc,
6019 unsigned int rd, unsigned int rn, unsigned int rm)
6020 {
6021 ULONGEST rd_val, rn_val, rm_val;
6022
6023 /* Instruction is of form:
6024
6025 <op><cond> rd, [rn,] rm [, <shift>]
6026
6027 Rewrite as:
6028
6029 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6030 r0, r1, r2 <- rd, rn, rm
6031 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6032 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6033 */
6034
6035 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6036 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6037 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6038 rd_val = displaced_read_reg (regs, dsc, rd);
6039 rn_val = displaced_read_reg (regs, dsc, rn);
6040 rm_val = displaced_read_reg (regs, dsc, rm);
6041 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6042 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6043 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6044 dsc->rd = rd;
6045
6046 dsc->cleanup = &cleanup_alu_reg;
6047 }
6048
6049 static int
6050 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6051 arm_displaced_step_copy_insn_closure *dsc)
6052 {
6053 unsigned int op = bits (insn, 21, 24);
6054 int is_mov = (op == 0xd);
6055
6056 if (!insn_references_pc (insn, 0x000ff00ful))
6057 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6058
6059 displaced_debug_printf ("copying reg %s insn %.8lx",
6060 is_mov ? "move" : "ALU", (unsigned long) insn);
6061
6062 if (is_mov)
6063 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6064 else
6065 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6066
6067 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6068 bits (insn, 0, 3));
6069 return 0;
6070 }
6071
6072 static int
6073 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6074 struct regcache *regs,
6075 arm_displaced_step_copy_insn_closure *dsc)
6076 {
6077 unsigned rm, rd;
6078
6079 rm = bits (insn, 3, 6);
6080 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6081
6082 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6083 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6084
6085 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6086
6087 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6088
6089 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6090
6091 return 0;
6092 }
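
/* Worked example (illustrative): the 16-bit instruction 0x46f7 is
   "mov pc, lr".  Here rd = 15 and rm = 14, so the masking above yields
   0x4610, i.e. "mov r0, r2"; install_alu_reg points r2 at the LR value and
   cleanup_alu_reg then writes r0 into the PC with ALU_WRITE_PC, giving the
   same return-from-function effect out of line.  */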
6093
6094 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6095
6096 static void
6097 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6098 struct regcache *regs,
6099 arm_displaced_step_copy_insn_closure *dsc)
6100 {
6101 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6102 int i;
6103
6104 for (i = 0; i < 4; i++)
6105 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6106
6107 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6108 }
6109
6110 static void
6111 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6112 arm_displaced_step_copy_insn_closure *dsc,
6113 unsigned int rd, unsigned int rn, unsigned int rm,
6114 unsigned rs)
6115 {
6116 int i;
6117 ULONGEST rd_val, rn_val, rm_val, rs_val;
6118
6119 /* Instruction is of form:
6120
6121 <op><cond> rd, [rn,] rm, <shift> rs
6122
6123 Rewrite as:
6124
6125 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6126 r0, r1, r2, r3 <- rd, rn, rm, rs
6127 Insn: <op><cond> r0, r1, r2, <shift> r3
6128 Cleanup: tmp5 <- r0
6129 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6130 rd <- tmp5
6131 */
6132
6133 for (i = 0; i < 4; i++)
6134 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6135
6136 rd_val = displaced_read_reg (regs, dsc, rd);
6137 rn_val = displaced_read_reg (regs, dsc, rn);
6138 rm_val = displaced_read_reg (regs, dsc, rm);
6139 rs_val = displaced_read_reg (regs, dsc, rs);
6140 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6141 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6142 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6143 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6144 dsc->rd = rd;
6145 dsc->cleanup = &cleanup_alu_shifted_reg;
6146 }
6147
6148 static int
6149 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6150 struct regcache *regs,
6151 arm_displaced_step_copy_insn_closure *dsc)
6152 {
6153 unsigned int op = bits (insn, 21, 24);
6154 int is_mov = (op == 0xd);
6155 unsigned int rd, rn, rm, rs;
6156
6157 if (!insn_references_pc (insn, 0x000fff0ful))
6158 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6159
6160 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6161 is_mov ? "move" : "ALU",
6162 (unsigned long) insn);
6163
6164 rn = bits (insn, 16, 19);
6165 rm = bits (insn, 0, 3);
6166 rs = bits (insn, 8, 11);
6167 rd = bits (insn, 12, 15);
6168
6169 if (is_mov)
6170 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6171 else
6172 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6173
6174 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6175
6176 return 0;
6177 }
6178
6179 /* Clean up load instructions. */
6180
6181 static void
6182 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6183 arm_displaced_step_copy_insn_closure *dsc)
6184 {
6185 ULONGEST rt_val, rt_val2 = 0, rn_val;
6186
6187 rt_val = displaced_read_reg (regs, dsc, 0);
6188 if (dsc->u.ldst.xfersize == 8)
6189 rt_val2 = displaced_read_reg (regs, dsc, 1);
6190 rn_val = displaced_read_reg (regs, dsc, 2);
6191
6192 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6193 if (dsc->u.ldst.xfersize > 4)
6194 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6195 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6196 if (!dsc->u.ldst.immed)
6197 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6198
6199 /* Handle register writeback. */
6200 if (dsc->u.ldst.writeback)
6201 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6202 /* Put result in right place. */
6203 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6204 if (dsc->u.ldst.xfersize == 8)
6205 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6206 }
6207
6208 /* Clean up store instructions. */
6209
6210 static void
6211 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6212 arm_displaced_step_copy_insn_closure *dsc)
6213 {
6214 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6215
6216 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6217 if (dsc->u.ldst.xfersize > 4)
6218 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6219 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6220 if (!dsc->u.ldst.immed)
6221 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6222 if (!dsc->u.ldst.restore_r4)
6223 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6224
6225 /* Writeback. */
6226 if (dsc->u.ldst.writeback)
6227 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6228 }
6229
6230 /* Copy "extra" load/store instructions. These are halfword/doubleword
6231 transfers, which have a different encoding to byte/word transfers. */
6232
6233 static int
6234 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6235 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6236 {
6237 unsigned int op1 = bits (insn, 20, 24);
6238 unsigned int op2 = bits (insn, 5, 6);
6239 unsigned int rt = bits (insn, 12, 15);
6240 unsigned int rn = bits (insn, 16, 19);
6241 unsigned int rm = bits (insn, 0, 3);
6242 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6243 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6244 int immed = (op1 & 0x4) != 0;
6245 int opcode;
6246 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6247
6248 if (!insn_references_pc (insn, 0x000ff00ful))
6249 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6250
6251 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6252 unprivileged ? "unprivileged " : "",
6253 (unsigned long) insn);
6254
6255 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6256
6257 if (opcode < 0)
6258 internal_error (__FILE__, __LINE__,
6259 _("copy_extra_ld_st: instruction decode error"));
6260
6261 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6262 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6263 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6264 if (!immed)
6265 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6266
6267 rt_val = displaced_read_reg (regs, dsc, rt);
6268 if (bytesize[opcode] == 8)
6269 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6270 rn_val = displaced_read_reg (regs, dsc, rn);
6271 if (!immed)
6272 rm_val = displaced_read_reg (regs, dsc, rm);
6273
6274 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6275 if (bytesize[opcode] == 8)
6276 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6277 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6278 if (!immed)
6279 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6280
6281 dsc->rd = rt;
6282 dsc->u.ldst.xfersize = bytesize[opcode];
6283 dsc->u.ldst.rn = rn;
6284 dsc->u.ldst.immed = immed;
6285 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6286 dsc->u.ldst.restore_r4 = 0;
6287
6288 if (immed)
6289 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6290 ->
6291 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6292 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6293 else
6294 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6295 ->
6296 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6297 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6298
6299 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6300
6301 return 0;
6302 }
6303
6304 /* Copy byte/halfword/word loads and stores. */
6305
6306 static void
6307 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6308 arm_displaced_step_copy_insn_closure *dsc, int load,
6309 int immed, int writeback, int size, int usermode,
6310 int rt, int rm, int rn)
6311 {
6312 ULONGEST rt_val, rn_val, rm_val = 0;
6313
6314 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6315 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6316 if (!immed)
6317 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6318 if (!load)
6319 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6320
6321 rt_val = displaced_read_reg (regs, dsc, rt);
6322 rn_val = displaced_read_reg (regs, dsc, rn);
6323 if (!immed)
6324 rm_val = displaced_read_reg (regs, dsc, rm);
6325
6326 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6327 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6328 if (!immed)
6329 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6330 dsc->rd = rt;
6331 dsc->u.ldst.xfersize = size;
6332 dsc->u.ldst.rn = rn;
6333 dsc->u.ldst.immed = immed;
6334 dsc->u.ldst.writeback = writeback;
6335
6336 /* To write PC we can do:
6337
6338 Before this sequence of instructions:
6339 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6340 r2 is the Rn value got from displaced_read_reg.
6341
6342 Insn1: push {pc} Write address of STR instruction + offset on stack
6343 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6344 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6345 = addr(Insn1) + offset - addr(Insn3) - 8
6346 = offset - 16
6347 Insn4: add r4, r4, #8 r4 = offset - 8
6348 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6349 = from + offset
6350 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6351
6352 Otherwise we don't know what value to write for PC, since the offset is
6353 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6354 of this can be found in Section "Saving from r15" in
6355 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6356
6357 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6358 }
6359
6360
6361 static int
6362 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6363 uint16_t insn2, struct regcache *regs,
6364 arm_displaced_step_copy_insn_closure *dsc, int size)
6365 {
6366 unsigned int u_bit = bit (insn1, 7);
6367 unsigned int rt = bits (insn2, 12, 15);
6368 int imm12 = bits (insn2, 0, 11);
6369 ULONGEST pc_val;
6370
6371 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6372 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6373 imm12);
6374
6375 if (!u_bit)
6376 imm12 = -1 * imm12;
6377
6378 /* Rewrite instruction LDR Rt imm12 into:
6379
6380 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6381
6382 LDR R0, R2, R3,
6383
6384 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6385
6386
6387 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6388 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6389 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6390
6391 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6392
6393 pc_val = pc_val & 0xfffffffc;
6394
6395 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6396 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6397
6398 dsc->rd = rt;
6399
6400 dsc->u.ldst.xfersize = size;
6401 dsc->u.ldst.immed = 0;
6402 dsc->u.ldst.writeback = 0;
6403 dsc->u.ldst.restore_r4 = 0;
6404
6405 /* LDR R0, R2, R3 */
6406 dsc->modinsn[0] = 0xf852;
6407 dsc->modinsn[1] = 0x3;
6408 dsc->numinsns = 2;
6409
6410 dsc->cleanup = &cleanup_load;
6411
6412 return 0;
6413 }
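
/* For example (illustrative, and assuming displaced_read_reg yields the
   usual Thumb-state PC of insn address + 4): "ldr.w r1, [pc, #8]" at address
   0x800a sees PC as 0x800e, which is aligned above to 0x800c, so the
   rewritten "ldr r0, [r2, r3]" with r2 = 0x800c and r3 = 8 loads from
   0x8014; cleanup_load then moves the result from r0 into r1.  */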
6414
6415 static int
6416 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6417 uint16_t insn2, struct regcache *regs,
6418 arm_displaced_step_copy_insn_closure *dsc,
6419 int writeback, int immed)
6420 {
6421 unsigned int rt = bits (insn2, 12, 15);
6422 unsigned int rn = bits (insn1, 0, 3);
6423 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6424 /* In LDR (register), there is also a register Rm, which is not allowed to
6425 be PC, so we don't have to check it. */
6426
6427 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6428 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6429 dsc);
6430
6431 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6432 rt, rn, insn1, insn2);
6433
6434 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6435 0, rt, rm, rn);
6436
6437 dsc->u.ldst.restore_r4 = 0;
6438
6439 if (immed)
6440 /* ldr[b]<cond> rt, [rn, #imm], etc.
6441 ->
6442 ldr[b]<cond> r0, [r2, #imm]. */
6443 {
6444 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6445 dsc->modinsn[1] = insn2 & 0x0fff;
6446 }
6447 else
6448 /* ldr[b]<cond> rt, [rn, rm], etc.
6449 ->
6450 ldr[b]<cond> r0, [r2, r3]. */
6451 {
6452 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6453 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6454 }
6455
6456 dsc->numinsns = 2;
6457
6458 return 0;
6459 }
6460
6461
6462 static int
6463 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6464 struct regcache *regs,
6465 arm_displaced_step_copy_insn_closure *dsc,
6466 int load, int size, int usermode)
6467 {
6468 int immed = !bit (insn, 25);
6469 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6470 unsigned int rt = bits (insn, 12, 15);
6471 unsigned int rn = bits (insn, 16, 19);
6472 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6473
6474 if (!insn_references_pc (insn, 0x000ff00ful))
6475 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6476
6477 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6478 load ? (size == 1 ? "ldrb" : "ldr")
6479 : (size == 1 ? "strb" : "str"),
6480 usermode ? "t" : "",
6481 rt, rn,
6482 (unsigned long) insn);
6483
6484 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6485 usermode, rt, rm, rn);
6486
6487 if (load || rt != ARM_PC_REGNUM)
6488 {
6489 dsc->u.ldst.restore_r4 = 0;
6490
6491 if (immed)
6492 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6493 ->
6494 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6495 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6496 else
6497 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6498 ->
6499 {ldr,str}[b]<cond> r0, [r2, r3]. */
6500 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6501 }
6502 else
6503 {
6504 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6505 dsc->u.ldst.restore_r4 = 1;
6506 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6507 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6508 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6509 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6510 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6511
6512 /* As above. */
6513 if (immed)
6514 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6515 else
6516 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6517
6518 dsc->numinsns = 6;
6519 }
6520
6521 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6522
6523 return 0;
6524 }
6525
6526 /* Clean up LDM instructions with a fully-populated register list. This is an
6527 unfortunate corner case: it's impossible to implement correctly by modifying
6528 the instruction. The issue is as follows: we have an instruction,
6529
6530 ldm rN, {r0-r15}
6531
6532 which we must rewrite to avoid loading PC. A possible solution would be to
6533 do the load in two halves, something like (with suitable cleanup
6534 afterwards):
6535
6536 mov r8, rN
6537 ldm[id][ab] r8!, {r0-r7}
6538 str r7, <temp>
6539 ldm[id][ab] r8, {r7-r14}
6540 <bkpt>
6541
6542 but at present there's no suitable place for <temp>, since the scratch space
6543 is overwritten before the cleanup routine is called. For now, we simply
6544 emulate the instruction. */
6545
6546 static void
6547 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6548 arm_displaced_step_copy_insn_closure *dsc)
6549 {
6550 int inc = dsc->u.block.increment;
6551 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6552 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6553 uint32_t regmask = dsc->u.block.regmask;
6554 int regno = inc ? 0 : 15;
6555 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6556 int exception_return = dsc->u.block.load && dsc->u.block.user
6557 && (regmask & 0x8000) != 0;
6558 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6559 int do_transfer = condition_true (dsc->u.block.cond, status);
6560 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6561
6562 if (!do_transfer)
6563 return;
6564
6565 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6566 sensible we can do here. Complain loudly. */
6567 if (exception_return)
6568 error (_("Cannot single-step exception return"));
6569
6570 /* We don't handle any stores here for now. */
6571 gdb_assert (dsc->u.block.load != 0);
6572
6573 displaced_debug_printf ("emulating block transfer: %s %s %s",
6574 dsc->u.block.load ? "ldm" : "stm",
6575 dsc->u.block.increment ? "inc" : "dec",
6576 dsc->u.block.before ? "before" : "after");
6577
6578 while (regmask)
6579 {
6580 uint32_t memword;
6581
6582 if (inc)
6583 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6584 regno++;
6585 else
6586 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6587 regno--;
6588
6589 xfer_addr += bump_before;
6590
6591 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6592 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6593
6594 xfer_addr += bump_after;
6595
6596 regmask &= ~(1 << regno);
6597 }
6598
6599 if (dsc->u.block.writeback)
6600 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6601 CANNOT_WRITE_PC);
6602 }
6603
6604 /* Clean up an STM which included the PC in the register list. */
6605
6606 static void
6607 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6608 arm_displaced_step_copy_insn_closure *dsc)
6609 {
6610 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6611 int store_executed = condition_true (dsc->u.block.cond, status);
6612 CORE_ADDR pc_stored_at, transferred_regs
6613 = count_one_bits (dsc->u.block.regmask);
6614 CORE_ADDR stm_insn_addr;
6615 uint32_t pc_val;
6616 long offset;
6617 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6618
6619 /* If condition code fails, there's nothing else to do. */
6620 if (!store_executed)
6621 return;
6622
6623 if (dsc->u.block.increment)
6624 {
6625 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6626
6627 if (dsc->u.block.before)
6628 pc_stored_at += 4;
6629 }
6630 else
6631 {
6632 pc_stored_at = dsc->u.block.xfer_addr;
6633
6634 if (dsc->u.block.before)
6635 pc_stored_at -= 4;
6636 }
6637
6638 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6639 stm_insn_addr = dsc->scratch_base;
6640 offset = pc_val - stm_insn_addr;
6641
6642 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6643 offset);
6644
6645 /* Rewrite the stored PC to the proper value for the non-displaced original
6646 instruction. */
6647 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6648 dsc->insn_addr + offset);
6649 }
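
/* For example (illustrative): if the copied STM executed at scratch_base
   0x1000 and the word stored for the PC reads back as 0x1008, then
   offset = 8, and the code above patches the stored word to insn_addr + 8,
   which is the value the original, non-displaced STM would have stored on
   this particular core.  */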
6650
6651 /* Clean up an LDM which includes the PC in the register list. We clumped all
6652 the registers in the transferred list into a contiguous range r0...rX (to
6653 avoid loading PC directly and losing control of the debugged program), so we
6654 must undo that here. */
6655
6656 static void
6657 cleanup_block_load_pc (struct gdbarch *gdbarch,
6658 struct regcache *regs,
6659 arm_displaced_step_copy_insn_closure *dsc)
6660 {
6661 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6662 int load_executed = condition_true (dsc->u.block.cond, status);
6663 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6664 unsigned int regs_loaded = count_one_bits (mask);
6665 unsigned int num_to_shuffle = regs_loaded, clobbered;
6666
6667 /* The method employed here will fail if the register list is fully populated
6668 (we need to avoid loading PC directly). */
6669 gdb_assert (num_to_shuffle < 16);
6670
6671 if (!load_executed)
6672 return;
6673
6674 clobbered = (1 << num_to_shuffle) - 1;
6675
6676 while (num_to_shuffle > 0)
6677 {
6678 if ((mask & (1 << write_reg)) != 0)
6679 {
6680 unsigned int read_reg = num_to_shuffle - 1;
6681
6682 if (read_reg != write_reg)
6683 {
6684 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6685 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6686 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6687 read_reg, write_reg);
6688 }
6689 else
6690 displaced_debug_printf ("LDM: register r%d already in the right "
6691 "place", write_reg);
6692
6693 clobbered &= ~(1 << write_reg);
6694
6695 num_to_shuffle--;
6696 }
6697
6698 write_reg--;
6699 }
6700
6701 /* Restore any registers we scribbled over. */
6702 for (write_reg = 0; clobbered != 0; write_reg++)
6703 {
6704 if ((clobbered & (1 << write_reg)) != 0)
6705 {
6706 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6707 CANNOT_WRITE_PC);
6708 displaced_debug_printf ("LDM: restored clobbered register r%d",
6709 write_reg);
6710 clobbered &= ~(1 << write_reg);
6711 }
6712 }
6713
6714 /* Perform register writeback manually. */
6715 if (dsc->u.block.writeback)
6716 {
6717 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6718
6719 if (dsc->u.block.increment)
6720 new_rn_val += regs_loaded * 4;
6721 else
6722 new_rn_val -= regs_loaded * 4;
6723
6724 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6725 CANNOT_WRITE_PC);
6726 }
6727 }
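
/* Worked example (illustrative): for an original "ldm r11!, {r4, r5, pc}",
   arm_copy_block_xfer below rewrites the register list to {r0, r1, r2} and
   clears the writeback bit.  This cleanup then moves r2 into the PC (via
   LOAD_WRITE_PC), r1 into r5 and r0 into r4, restores the clobbered low
   registers from dsc->tmp[], and finally performs the writeback of
   r11 + 12 by hand.  */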
6728
6729 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6730 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6731
6732 static int
6733 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6734 struct regcache *regs,
6735 arm_displaced_step_copy_insn_closure *dsc)
6736 {
6737 int load = bit (insn, 20);
6738 int user = bit (insn, 22);
6739 int increment = bit (insn, 23);
6740 int before = bit (insn, 24);
6741 int writeback = bit (insn, 21);
6742 int rn = bits (insn, 16, 19);
6743
6744 /* Block transfers which don't mention PC can be run directly
6745 out-of-line. */
6746 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6747 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6748
6749 if (rn == ARM_PC_REGNUM)
6750 {
6751 warning (_("displaced: Unpredictable LDM or STM with "
6752 "base register r15"));
6753 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6754 }
6755
6756 displaced_debug_printf ("copying block transfer insn %.8lx",
6757 (unsigned long) insn);
6758
6759 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6760 dsc->u.block.rn = rn;
6761
6762 dsc->u.block.load = load;
6763 dsc->u.block.user = user;
6764 dsc->u.block.increment = increment;
6765 dsc->u.block.before = before;
6766 dsc->u.block.writeback = writeback;
6767 dsc->u.block.cond = bits (insn, 28, 31);
6768
6769 dsc->u.block.regmask = insn & 0xffff;
6770
6771 if (load)
6772 {
6773 if ((insn & 0xffff) == 0xffff)
6774 {
6775 /* LDM with a fully-populated register list. This case is
6776 particularly tricky. Implement for now by fully emulating the
6777 instruction (which might not behave perfectly in all cases, but
6778 these instructions should be rare enough for that not to matter
6779 too much). */
6780 dsc->modinsn[0] = ARM_NOP;
6781
6782 dsc->cleanup = &cleanup_block_load_all;
6783 }
6784 else
6785 {
6786 /* LDM of a list of registers which includes PC. Implement by
6787 rewriting the list of registers to be transferred into a
6788 contiguous chunk r0...rX before doing the transfer, then shuffling
6789 registers into the correct places in the cleanup routine. */
6790 unsigned int regmask = insn & 0xffff;
6791 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6792 unsigned int i;
6793
6794 for (i = 0; i < num_in_list; i++)
6795 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6796
6797 /* Writeback makes things complicated. We need to avoid clobbering
6798 the base register with one of the registers in our modified
6799 register list, but just using a different register can't work in
6800 all cases, e.g.:
6801
6802 ldm r14!, {r0-r13,pc}
6803
6804 which would need to be rewritten as:
6805
6806 ldm rN!, {r0-r14}
6807
6808 but that can't work, because there's no free register for N.
6809
6810 Solve this by turning off the writeback bit, and emulating
6811 writeback manually in the cleanup routine. */
6812
6813 if (writeback)
6814 insn &= ~(1 << 21);
6815
6816 new_regmask = (1 << num_in_list) - 1;
6817
6818 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6819 "%.4x, modified list %.4x",
6820 rn, writeback ? "!" : "",
6821 (int) insn & 0xffff, new_regmask);
6822
6823 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6824
6825 dsc->cleanup = &cleanup_block_load_pc;
6826 }
6827 }
6828 else
6829 {
6830 /* STM of a list of registers which includes PC. Run the instruction
6831 as-is, but out of line: this will store the wrong value for the PC,
6832 so we must manually fix up the memory in the cleanup routine.
6833 Doing things this way has the advantage that we can auto-detect
6834 the offset of the PC write (which is architecture-dependent) in
6835 the cleanup routine. */
6836 dsc->modinsn[0] = insn;
6837
6838 dsc->cleanup = &cleanup_block_store_pc;
6839 }
6840
6841 return 0;
6842 }
6843
6844 static int
6845 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6846 struct regcache *regs,
6847 arm_displaced_step_copy_insn_closure *dsc)
6848 {
6849 int rn = bits (insn1, 0, 3);
6850 int load = bit (insn1, 4);
6851 int writeback = bit (insn1, 5);
6852
6853 /* Block transfers which don't mention PC can be run directly
6854 out-of-line. */
6855 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6856 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6857
6858 if (rn == ARM_PC_REGNUM)
6859 {
6860 warning (_("displaced: Unpredictable LDM or STM with "
6861 "base register r15"));
6862 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6863 "unpredictable ldm/stm", dsc);
6864 }
6865
6866 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6867 insn1, insn2);
6868
6869 /* Clear bit 13, since it should always be zero. */
6870 dsc->u.block.regmask = (insn2 & 0xdfff);
6871 dsc->u.block.rn = rn;
6872
6873 dsc->u.block.load = load;
6874 dsc->u.block.user = 0;
6875 dsc->u.block.increment = bit (insn1, 7);
6876 dsc->u.block.before = bit (insn1, 8);
6877 dsc->u.block.writeback = writeback;
6878 dsc->u.block.cond = INST_AL;
6879 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6880
6881 if (load)
6882 {
6883 if (dsc->u.block.regmask == 0xffff)
6884 {
6885 /* This case cannot happen, since bit 13 was cleared above. */
6886 gdb_assert (0);
6887 }
6888 else
6889 {
6890 unsigned int regmask = dsc->u.block.regmask;
6891 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6892 unsigned int i;
6893
6894 for (i = 0; i < num_in_list; i++)
6895 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6896
6897 if (writeback)
6898 insn1 &= ~(1 << 5);
6899
6900 new_regmask = (1 << num_in_list) - 1;
6901
6902 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6903 "%.4x, modified list %.4x",
6904 rn, writeback ? "!" : "",
6905 (int) dsc->u.block.regmask, new_regmask);
6906
6907 dsc->modinsn[0] = insn1;
6908 dsc->modinsn[1] = (new_regmask & 0xffff);
6909 dsc->numinsns = 2;
6910
6911 dsc->cleanup = &cleanup_block_load_pc;
6912 }
6913 }
6914 else
6915 {
6916 dsc->modinsn[0] = insn1;
6917 dsc->modinsn[1] = insn2;
6918 dsc->numinsns = 2;
6919 dsc->cleanup = &cleanup_block_store_pc;
6920 }
6921 return 0;
6922 }
6923
6924 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6925 This is used to avoid a dependency on BFD's bfd_endian enum. */
6926
6927 ULONGEST
6928 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6929 int byte_order)
6930 {
6931 return read_memory_unsigned_integer (memaddr, len,
6932 (enum bfd_endian) byte_order);
6933 }
6934
6935 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6936
6937 CORE_ADDR
6938 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6939 CORE_ADDR val)
6940 {
6941 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6942 }
6943
6944 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6945
6946 static CORE_ADDR
6947 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6948 {
6949 return 0;
6950 }
6951
6952 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6953
6954 int
6955 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6956 {
6957 return arm_is_thumb (self->regcache);
6958 }
6959
6960 /* single_step() is called just before we want to resume the inferior,
6961 if we want to single-step it but there is no hardware or kernel
6962 single-step support. We find the possible next PCs and set
6963 breakpoints on them. */
6964
6965 std::vector<CORE_ADDR>
6966 arm_software_single_step (struct regcache *regcache)
6967 {
6968 struct gdbarch *gdbarch = regcache->arch ();
6969 struct arm_get_next_pcs next_pcs_ctx;
6970
6971 arm_get_next_pcs_ctor (&next_pcs_ctx,
6972 &arm_get_next_pcs_ops,
6973 gdbarch_byte_order (gdbarch),
6974 gdbarch_byte_order_for_code (gdbarch),
6975 0,
6976 regcache);
6977
6978 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6979
6980 for (CORE_ADDR &pc_ref : next_pcs)
6981 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6982
6983 return next_pcs;
6984 }
6985
6986 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6987 for Linux, where some SVC instructions must be treated specially. */
6988
6989 static void
6990 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6991 arm_displaced_step_copy_insn_closure *dsc)
6992 {
6993 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6994
6995 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6996 (unsigned long) resume_addr);
6997
6998 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6999 }
7000
7001
7002 /* Common copy routine for svc instruction. */
7003
7004 static int
7005 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7006 arm_displaced_step_copy_insn_closure *dsc)
7007 {
7008 /* Preparation: none.
7009 Insn: unmodified svc.
7010 Cleanup: pc <- insn_addr + insn_size. */
7011
7012 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7013 instruction. */
7014 dsc->wrote_to_pc = 1;
7015
7016 /* Allow OS-specific code to override SVC handling. */
7017 if (dsc->u.svc.copy_svc_os)
7018 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7019 else
7020 {
7021 dsc->cleanup = &cleanup_svc;
7022 return 0;
7023 }
7024 }
7025
7026 static int
7027 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7028 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7029 {
7030
7031 displaced_debug_printf ("copying svc insn %.8lx",
7032 (unsigned long) insn);
7033
7034 dsc->modinsn[0] = insn;
7035
7036 return install_svc (gdbarch, regs, dsc);
7037 }
7038
7039 static int
7040 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7041 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7042 {
7043
7044 displaced_debug_printf ("copying svc insn %.4x", insn);
7045
7046 dsc->modinsn[0] = insn;
7047
7048 return install_svc (gdbarch, regs, dsc);
7049 }
7050
7051 /* Copy undefined instructions. */
7052
7053 static int
7054 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7055 arm_displaced_step_copy_insn_closure *dsc)
7056 {
7057 displaced_debug_printf ("copying undefined insn %.8lx",
7058 (unsigned long) insn);
7059
7060 dsc->modinsn[0] = insn;
7061
7062 return 0;
7063 }
7064
7065 static int
7066 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7067 arm_displaced_step_copy_insn_closure *dsc)
7068 {
7069
7070 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7071 (unsigned short) insn1, (unsigned short) insn2);
7072
7073 dsc->modinsn[0] = insn1;
7074 dsc->modinsn[1] = insn2;
7075 dsc->numinsns = 2;
7076
7077 return 0;
7078 }
7079
7080 /* Copy unpredictable instructions. */
7081
7082 static int
7083 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7084 arm_displaced_step_copy_insn_closure *dsc)
7085 {
7086 displaced_debug_printf ("copying unpredictable insn %.8lx",
7087 (unsigned long) insn);
7088
7089 dsc->modinsn[0] = insn;
7090
7091 return 0;
7092 }
7093
7094 /* The decode_* functions are instruction decoding helpers. They mostly follow
7095 the presentation in the ARM ARM. */
7096
7097 static int
7098 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7099 struct regcache *regs,
7100 arm_displaced_step_copy_insn_closure *dsc)
7101 {
7102 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7103 unsigned int rn = bits (insn, 16, 19);
7104
7105 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7106 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7107 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7108 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7109 else if ((op1 & 0x60) == 0x20)
7110 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7111 else if ((op1 & 0x71) == 0x40)
7112 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7113 dsc);
7114 else if ((op1 & 0x77) == 0x41)
7115 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7116 else if ((op1 & 0x77) == 0x45)
7117 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7118 else if ((op1 & 0x77) == 0x51)
7119 {
7120 if (rn != 0xf)
7121 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7122 else
7123 return arm_copy_unpred (gdbarch, insn, dsc);
7124 }
7125 else if ((op1 & 0x77) == 0x55)
7126 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7127 else if (op1 == 0x57)
7128 switch (op2)
7129 {
7130 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7131 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7132 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7133 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7134 default: return arm_copy_unpred (gdbarch, insn, dsc);
7135 }
7136 else if ((op1 & 0x63) == 0x43)
7137 return arm_copy_unpred (gdbarch, insn, dsc);
7138 else if ((op2 & 0x1) == 0x0)
7139 switch (op1 & ~0x80)
7140 {
7141 case 0x61:
7142 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7143 case 0x65:
7144 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7145 case 0x71: case 0x75:
7146 /* pld/pldw reg. */
7147 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7148 case 0x63: case 0x67: case 0x73: case 0x77:
7149 return arm_copy_unpred (gdbarch, insn, dsc);
7150 default:
7151 return arm_copy_undef (gdbarch, insn, dsc);
7152 }
7153 else
7154 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7155 }
7156
7157 static int
7158 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7159 struct regcache *regs,
7160 arm_displaced_step_copy_insn_closure *dsc)
7161 {
7162 if (bit (insn, 27) == 0)
7163 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7164 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7165 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7166 {
7167 case 0x0: case 0x2:
7168 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7169
7170 case 0x1: case 0x3:
7171 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7172
7173 case 0x4: case 0x5: case 0x6: case 0x7:
7174 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7175
7176 case 0x8:
7177 switch ((insn & 0xe00000) >> 21)
7178 {
7179 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7180 /* stc/stc2. */
7181 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7182
7183 case 0x2:
7184 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7185
7186 default:
7187 return arm_copy_undef (gdbarch, insn, dsc);
7188 }
7189
7190 case 0x9:
7191 {
7192 int rn_f = (bits (insn, 16, 19) == 0xf);
7193 switch ((insn & 0xe00000) >> 21)
7194 {
7195 case 0x1: case 0x3:
7196 /* ldc/ldc2 imm (undefined for rn == pc). */
7197 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7198 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7199
7200 case 0x2:
7201 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7202
7203 case 0x4: case 0x5: case 0x6: case 0x7:
7204 /* ldc/ldc2 lit (undefined for rn != pc). */
7205 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7206 : arm_copy_undef (gdbarch, insn, dsc);
7207
7208 default:
7209 return arm_copy_undef (gdbarch, insn, dsc);
7210 }
7211 }
7212
7213 case 0xa:
7214 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7215
7216 case 0xb:
7217 if (bits (insn, 16, 19) == 0xf)
7218 /* ldc/ldc2 lit. */
7219 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7220 else
7221 return arm_copy_undef (gdbarch, insn, dsc);
7222
7223 case 0xc:
7224 if (bit (insn, 4))
7225 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7226 else
7227 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7228
7229 case 0xd:
7230 if (bit (insn, 4))
7231 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7232 else
7233 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7234
7235 default:
7236 return arm_copy_undef (gdbarch, insn, dsc);
7237 }
7238 }
7239
7240 /* Decode miscellaneous instructions in dp/misc encoding space. */
7241
7242 static int
7243 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7244 struct regcache *regs,
7245 arm_displaced_step_copy_insn_closure *dsc)
7246 {
7247 unsigned int op2 = bits (insn, 4, 6);
7248 unsigned int op = bits (insn, 21, 22);
7249
7250 switch (op2)
7251 {
7252 case 0x0:
7253 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7254
7255 case 0x1:
7256 if (op == 0x1) /* bx. */
7257 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7258 else if (op == 0x3)
7259 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7260 else
7261 return arm_copy_undef (gdbarch, insn, dsc);
7262
7263 case 0x2:
7264 if (op == 0x1)
7265 /* Not really supported. */
7266 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7267 else
7268 return arm_copy_undef (gdbarch, insn, dsc);
7269
7270 case 0x3:
7271 if (op == 0x1)
7272 return arm_copy_bx_blx_reg (gdbarch, insn,
7273 regs, dsc); /* blx register. */
7274 else
7275 return arm_copy_undef (gdbarch, insn, dsc);
7276
7277 case 0x5:
7278 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7279
7280 case 0x7:
7281 if (op == 0x1)
7282 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7283 else if (op == 0x3)
7284 /* Not really supported. */
7285 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7286 /* Fall through. */
7287
7288 default:
7289 return arm_copy_undef (gdbarch, insn, dsc);
7290 }
7291 }
7292
7293 static int
7294 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7295 struct regcache *regs,
7296 arm_displaced_step_copy_insn_closure *dsc)
7297 {
7298 if (bit (insn, 25))
7299 switch (bits (insn, 20, 24))
7300 {
7301 case 0x10:
7302 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7303
7304 case 0x14:
7305 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7306
7307 case 0x12: case 0x16:
7308 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7309
7310 default:
7311 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7312 }
7313 else
7314 {
7315 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7316
7317 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7318 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7319 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7320 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7321 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7322 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7323 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7324 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7325 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7326 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7327 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7328 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7329 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7330 /* The (op1 & 0x12) == 0x02 argument means "unprivileged". */
7331 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7332 dsc);
7333 }
7334
7335 /* Should be unreachable. */
7336 return 1;
7337 }
7338
7339 static int
7340 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7341 struct regcache *regs,
7342 arm_displaced_step_copy_insn_closure *dsc)
7343 {
7344 int a = bit (insn, 25), b = bit (insn, 4);
7345 uint32_t op1 = bits (insn, 20, 24);
7346
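  /* The trailing arguments to arm_copy_ldr_str_ldrb_strb appear to be
     LOAD, SIZE and USERMODE, so the eight branches below cover
     str/strt/ldr/ldrt (word) and strb/strbt/ldrb/ldrbt (byte).  */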
7347 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7348 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7349 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7350 else if ((!a && (op1 & 0x17) == 0x02)
7351 || (a && (op1 & 0x17) == 0x02 && !b))
7352 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7353 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7354 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7355 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7356 else if ((!a && (op1 & 0x17) == 0x03)
7357 || (a && (op1 & 0x17) == 0x03 && !b))
7358 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7359 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7360 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7361 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7362 else if ((!a && (op1 & 0x17) == 0x06)
7363 || (a && (op1 & 0x17) == 0x06 && !b))
7364 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7365 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7366 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7367 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7368 else if ((!a && (op1 & 0x17) == 0x07)
7369 || (a && (op1 & 0x17) == 0x07 && !b))
7370 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7371
7372 /* Should be unreachable. */
7373 return 1;
7374 }
7375
7376 static int
7377 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7378 arm_displaced_step_copy_insn_closure *dsc)
7379 {
7380 switch (bits (insn, 20, 24))
7381 {
7382 case 0x00: case 0x01: case 0x02: case 0x03:
7383 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7384
7385 case 0x04: case 0x05: case 0x06: case 0x07:
7386 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7387
7388 case 0x08: case 0x09: case 0x0a: case 0x0b:
7389 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7390 return arm_copy_unmodified (gdbarch, insn,
7391 "decode/pack/unpack/saturate/reverse", dsc);
7392
7393 case 0x18:
7394 if (bits (insn, 5, 7) == 0) /* op2. */
7395 {
7396 if (bits (insn, 12, 15) == 0xf)
7397 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7398 else
7399 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7400 }
7401 else
7402 return arm_copy_undef (gdbarch, insn, dsc);
7403
7404 case 0x1a: case 0x1b:
7405 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7406 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7407 else
7408 return arm_copy_undef (gdbarch, insn, dsc);
7409
7410 case 0x1c: case 0x1d:
7411 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7412 {
7413 if (bits (insn, 0, 3) == 0xf)
7414 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7415 else
7416 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7417 }
7418 else
7419 return arm_copy_undef (gdbarch, insn, dsc);
7420
7421 case 0x1e: case 0x1f:
7422 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7423 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7424 else
7425 return arm_copy_undef (gdbarch, insn, dsc);
7426 }
7427
7428 /* Should be unreachable. */
7429 return 1;
7430 }
7431
7432 static int
7433 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7434 struct regcache *regs,
7435 arm_displaced_step_copy_insn_closure *dsc)
7436 {
7437 if (bit (insn, 25))
7438 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7439 else
7440 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7441 }
7442
7443 static int
7444 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7445 struct regcache *regs,
7446 arm_displaced_step_copy_insn_closure *dsc)
7447 {
7448 unsigned int opcode = bits (insn, 20, 24);
7449
7450 switch (opcode)
7451 {
7452 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7453 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7454
7455 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7456 case 0x12: case 0x16:
7457 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7458
7459 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7460 case 0x13: case 0x17:
7461 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7462
7463 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7464 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7465 /* Note: no writeback for these instructions. Bit 25 will always be
7466 zero here (guaranteed by the caller), so the following works OK. */
7467 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7468 }
7469
7470 /* Should be unreachable. */
7471 return 1;
7472 }
7473
7474 /* Decode shifted register instructions. */
7475
7476 static int
7477 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7478 uint16_t insn2, struct regcache *regs,
7479 arm_displaced_step_copy_insn_closure *dsc)
7480 {
7481 /* PC is only allowed to be used in the MOV instruction. */
7482
7483 unsigned int op = bits (insn1, 5, 8);
7484 unsigned int rn = bits (insn1, 0, 3);
7485
7486 if (op == 0x2 && rn == 0xf) /* MOV */
7487 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7488 else
7489 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7490 "dp (shift reg)", dsc);
7491 }
7492
7493
7494 /* Decode extension register load/store. Exactly the same as
7495 arm_decode_ext_reg_ld_st. */
7496
7497 static int
7498 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7499 uint16_t insn2, struct regcache *regs,
7500 arm_displaced_step_copy_insn_closure *dsc)
7501 {
7502 unsigned int opcode = bits (insn1, 4, 8);
7503
7504 switch (opcode)
7505 {
7506 case 0x04: case 0x05:
7507 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7508 "vfp/neon vmov", dsc);
7509
7510 case 0x08: case 0x0c: /* 01x00 */
7511 case 0x0a: case 0x0e: /* 01x10 */
7512 case 0x12: case 0x16: /* 10x10 */
7513 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7514 "vfp/neon vstm/vpush", dsc);
7515
7516 case 0x09: case 0x0d: /* 01x01 */
7517 case 0x0b: case 0x0f: /* 01x11 */
7518 case 0x13: case 0x17: /* 10x11 */
7519 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7520 "vfp/neon vldm/vpop", dsc);
7521
7522 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7523 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7524 "vstr", dsc);
7525 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7526 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7527 }
7528
7529 /* Should be unreachable. */
7530 return 1;
7531 }
7532
7533 static int
7534 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7535 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7536 {
7537 unsigned int op1 = bits (insn, 20, 25);
7538 int op = bit (insn, 4);
7539 unsigned int coproc = bits (insn, 8, 11);
7540
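  /* Coprocessors 10 and 11 are the VFP/Neon register file, so the
     (coproc & 0xe) == 0xa tests below distinguish VFP/Neon transfers
     from generic coprocessor instructions.  */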
7541 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7542 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7543 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7544 && (coproc & 0xe) != 0xa)
7545 /* stc/stc2. */
7546 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7547 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7548 && (coproc & 0xe) != 0xa)
7549 /* ldc/ldc2 imm/lit. */
7550 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7551 else if ((op1 & 0x3e) == 0x00)
7552 return arm_copy_undef (gdbarch, insn, dsc);
7553 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7554 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7555 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7556 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7557 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7558 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7559 else if ((op1 & 0x30) == 0x20 && !op)
7560 {
7561 if ((coproc & 0xe) == 0xa)
7562 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7563 else
7564 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7565 }
7566 else if ((op1 & 0x30) == 0x20 && op)
7567 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7568 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7569 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7570 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7571 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7572 else if ((op1 & 0x30) == 0x30)
7573 return arm_copy_svc (gdbarch, insn, regs, dsc);
7574 else
7575 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7576 }
7577
7578 static int
7579 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7580 uint16_t insn2, struct regcache *regs,
7581 arm_displaced_step_copy_insn_closure *dsc)
7582 {
7583 unsigned int coproc = bits (insn2, 8, 11);
7584 unsigned int bit_5_8 = bits (insn1, 5, 8);
7585 unsigned int bit_9 = bit (insn1, 9);
7586 unsigned int bit_4 = bit (insn1, 4);
7587
7588 if (bit_9 == 0)
7589 {
7590 if (bit_5_8 == 2)
7591 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7592 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7593 dsc);
7594 else if (bit_5_8 == 0) /* UNDEFINED. */
7595 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7596 else
7597 {
7598 /* coproc is 101x: SIMD/VFP, ext register load/store. */
7599 if ((coproc & 0xe) == 0xa)
7600 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7601 dsc);
7602 else /* coproc is not 101x. */
7603 {
7604 if (bit_4 == 0) /* STC/STC2. */
7605 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7606 "stc/stc2", dsc);
7607 else /* LDC/LDC2 {literal, immediate}. */
7608 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7609 regs, dsc);
7610 }
7611 }
7612 }
7613 else
7614 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7615
7616 return 0;
7617 }
7618
7619 static void
7620 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7621 arm_displaced_step_copy_insn_closure *dsc, int rd)
7622 {
7623 /* ADR Rd, #imm
7624
7625 Rewrite as:
7626
7627 Preparation: Rd <- PC
7628 Insn: ADD Rd, #imm
7629 Cleanup: Null.
7630 */
7631
7632 /* Rd <- PC */
7633 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7634 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7635 }
7636
7637 static int
7638 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7639 arm_displaced_step_copy_insn_closure *dsc,
7640 int rd, unsigned int imm)
7641 {
7642
7643 /* Encoding T2: ADDS Rd, #imm */
7644 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7645
7646 install_pc_relative (gdbarch, regs, dsc, rd);
7647
7648 return 0;
7649 }
7650
7651 static int
7652 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7653 struct regcache *regs,
7654 arm_displaced_step_copy_insn_closure *dsc)
7655 {
7656 unsigned int rd = bits (insn, 8, 10);
7657 unsigned int imm8 = bits (insn, 0, 7);
7658
7659 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7660 rd, imm8, insn);
7661
7662 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7663 }
7664
7665 static int
7666 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7667 uint16_t insn2, struct regcache *regs,
7668 arm_displaced_step_copy_insn_closure *dsc)
7669 {
7670 unsigned int rd = bits (insn2, 8, 11);
7671 /* Since the immediate has the same encoding in ADR, ADD and SUB, simply
7672 extract the raw immediate encoding rather than computing the immediate
7673 value. When generating the ADD or SUB instruction, the immediate can
7674 then be ORed directly into the encoding. */
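  /* For reference, the 32-bit ADR/ADD/SUB (immediate) encodings split the
     immediate as i:imm3:imm8, where i is bit 10 of the first halfword and
     imm3:imm8 are bits 14-12 and 7-0 of the second halfword; hence the
     0x0400 and 0x70ff masks below.  */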
7675 unsigned int imm_3_8 = insn2 & 0x70ff;
7676 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7677
7678 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7679 rd, imm_i, imm_3_8, insn1, insn2);
7680
7681 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
7682 {
7683 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
7684 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7685 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7686 }
7687 else /* ADR encoding T3 (add form). */
7688 {
7689 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
7690 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7691 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7692 }
7693 dsc->numinsns = 2;
7694
7695 install_pc_relative (gdbarch, regs, dsc, rd);
7696
7697 return 0;
7698 }
7699
7700 static int
7701 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7702 struct regcache *regs,
7703 arm_displaced_step_copy_insn_closure *dsc)
7704 {
7705 unsigned int rt = bits (insn1, 8, 10);
7706 unsigned int pc;
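  /* The 8-bit immediate in LDR (literal) encodes a word offset, so scale
     it by 4 here.  */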
7707 int imm8 = (bits (insn1, 0, 7) << 2);
7708
7709 /* LDR Rd, #imm8
7710
7711 Rewrite as:
7712
7713 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7714
7715 Insn: LDR R0, [R2, R3];
7716 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7717
7718 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7719
7720 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7721 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7722 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7723 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7724 /* The assembler calculates the required value of the offset from the
7725 Align(PC,4) value of this instruction to the label. */
7726 pc = pc & 0xfffffffc;
7727
7728 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7729 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7730
7731 dsc->rd = rt;
7732 dsc->u.ldst.xfersize = 4;
7733 dsc->u.ldst.rn = 0;
7734 dsc->u.ldst.immed = 0;
7735 dsc->u.ldst.writeback = 0;
7736 dsc->u.ldst.restore_r4 = 0;
7737
7738 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7739
7740 dsc->cleanup = &cleanup_load;
7741
7742 return 0;
7743 }
7744
7745 /* Copy Thumb cbnz/cbz instruction. */
7746
7747 static int
7748 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7749 struct regcache *regs,
7750 arm_displaced_step_copy_insn_closure *dsc)
7751 {
7752 int non_zero = bit (insn1, 11);
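  /* The branch offset for CB{N}Z is i:imm5:'0' (bit 9 and bits 7-3),
     giving an even forward offset of 0-126 bytes.  */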
7753 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7754 CORE_ADDR from = dsc->insn_addr;
7755 int rn = bits (insn1, 0, 2);
7756 int rn_val = displaced_read_reg (regs, dsc, rn);
7757
7758 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7759 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7760 true, set it to INST_AL so cleanup_branch knows the branch is taken;
7761 otherwise leave it as is and cleanup_branch will do nothing. */
7762 if (dsc->u.branch.cond)
7763 {
7764 dsc->u.branch.cond = INST_AL;
7765 dsc->u.branch.dest = from + 4 + imm5;
7766 }
7767 else
7768 dsc->u.branch.dest = from + 2;
7769
7770 dsc->u.branch.link = 0;
7771 dsc->u.branch.exchange = 0;
7772
7773 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7774 non_zero ? "cbnz" : "cbz",
7775 rn, rn_val, insn1, dsc->u.branch.dest);
7776
7777 dsc->modinsn[0] = THUMB_NOP;
7778
7779 dsc->cleanup = &cleanup_branch;
7780 return 0;
7781 }
7782
7783 /* Copy Table Branch Byte/Halfword. */
7784 static int
7785 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7786 uint16_t insn2, struct regcache *regs,
7787 arm_displaced_step_copy_insn_closure *dsc)
7788 {
7789 ULONGEST rn_val, rm_val;
7790 int is_tbh = bit (insn2, 4);
7791 CORE_ADDR halfwords = 0;
7792 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7793
7794 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7795 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7796
7797 if (is_tbh)
7798 {
7799 gdb_byte buf[2];
7800
7801 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7802 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7803 }
7804 else
7805 {
7806 gdb_byte buf[1];
7807
7808 target_read_memory (rn_val + rm_val, buf, 1);
7809 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7810 }
7811
7812 displaced_debug_printf ("%s base 0x%x offset 0x%x entry 0x%x",
7813 is_tbh ? "tbh" : "tbb",
7814 (unsigned int) rn_val, (unsigned int) rm_val,
7815 (unsigned int) halfwords);
7816
7817 dsc->u.branch.cond = INST_AL;
7818 dsc->u.branch.link = 0;
7819 dsc->u.branch.exchange = 0;
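  /* The branch target is the PC value of the TBB/TBH (this insn + 4) plus
     twice the table entry that was just read.  */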
7820 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7821
7822 dsc->cleanup = &cleanup_branch;
7823
7824 return 0;
7825 }
7826
7827 static void
7828 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7829 arm_displaced_step_copy_insn_closure *dsc)
7830 {
7831 /* PC <- r7 */
7832 int val = displaced_read_reg (regs, dsc, 7);
7833 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7834
7835 /* r7 <- r8 */
7836 val = displaced_read_reg (regs, dsc, 8);
7837 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7838
7839 /* r8 <- tmp[0] */
7840 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7841
7842 }
7843
7844 static int
7845 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7846 struct regcache *regs,
7847 arm_displaced_step_copy_insn_closure *dsc)
7848 {
7849 dsc->u.block.regmask = insn1 & 0x00ff;
7850
7851 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7852 to:
7853
7854 (1) The register list is full, that is, r0-r7 are all used.
7855 Prepare: tmp[0] <- r8
7856
7857 POP {r0, r1, ..., r6, r7}; remove PC from reglist
7858 MOV r8, r7; move the value of r7 into r8;
7859 POP {r7}; store the PC value into r7.
7860
7861 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7862
7863 (2) The register list is not full; suppose there are N registers in
7864 the register list (excluding PC, 0 <= N <= 7).
7865 Prepare: for each i in 0 - N, tmp[i] <- ri.
7866
7867 POP {r0, r1, ..., rN};
7868
7869 Cleanup: Set the registers in the original reglist from r0 - rN, then
7870 restore r0 - rN from tmp[] properly.
7871 */
7872 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7873 dsc->u.block.regmask, insn1);
7874
7875 if (dsc->u.block.regmask == 0xff)
7876 {
7877 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7878
7879 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7880 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7881 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7882
7883 dsc->numinsns = 3;
7884 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7885 }
7886 else
7887 {
7888 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7889 unsigned int i;
7890 unsigned int new_regmask;
7891
7892 for (i = 0; i < num_in_list + 1; i++)
7893 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7894
7895 new_regmask = (1 << (num_in_list + 1)) - 1;
7896
7897 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7898 "modified list %.4x",
7899 (int) dsc->u.block.regmask, new_regmask);
7900
7901 dsc->u.block.regmask |= 0x8000;
7902 dsc->u.block.writeback = 0;
7903 dsc->u.block.cond = INST_AL;
7904
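      /* Keep the POP opcode, clear the original register list and its PC
	 bit (bits 8-0), and substitute the compacted list r0-rN.  */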
7905 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7906
7907 dsc->cleanup = &cleanup_block_load_pc;
7908 }
7909
7910 return 0;
7911 }
7912
7913 static void
7914 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7915 struct regcache *regs,
7916 arm_displaced_step_copy_insn_closure *dsc)
7917 {
7918 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7919 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7920 int err = 0;
7921
7922 /* 16-bit thumb instructions. */
7923 switch (op_bit_12_15)
7924 {
7925 /* Shift (immediate), add, subtract, move and compare. */
7926 case 0: case 1: case 2: case 3:
7927 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7928 "shift/add/sub/mov/cmp",
7929 dsc);
7930 break;
7931 case 4:
7932 switch (op_bit_10_11)
7933 {
7934 case 0: /* Data-processing */
7935 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7936 "data-processing",
7937 dsc);
7938 break;
7939 case 1: /* Special data instructions and branch and exchange. */
7940 {
7941 unsigned short op = bits (insn1, 7, 9);
7942 if (op == 6 || op == 7) /* BX or BLX */
7943 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7944 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7945 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7946 else
7947 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7948 dsc);
7949 }
7950 break;
7951 default: /* LDR (literal) */
7952 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7953 }
7954 break;
7955 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7956 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7957 break;
7958 case 10:
7959 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7960 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7961 else /* Generate SP-relative address */
7962 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7963 break;
7964 case 11: /* Misc 16-bit instructions */
7965 {
7966 switch (bits (insn1, 8, 11))
7967 {
7968 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7969 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7970 break;
7971 case 12: case 13: /* POP */
7972 if (bit (insn1, 8)) /* PC is in register list. */
7973 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7974 else
7975 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7976 break;
7977 case 15: /* If-Then, and hints */
7978 if (bits (insn1, 0, 3))
7979 /* If-Then makes up to four following instructions conditional.
7980 The IT instruction itself is not conditional, so handle it as an
7981 ordinary unmodified instruction. */
7982 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7983 dsc);
7984 else
7985 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7986 break;
7987 default:
7988 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7989 }
7990 }
7991 break;
7992 case 12:
7993 if (op_bit_10_11 < 2) /* Store multiple registers */
7994 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7995 else /* Load multiple registers */
7996 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7997 break;
7998 case 13: /* Conditional branch and supervisor call */
7999 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8000 err = thumb_copy_b (gdbarch, insn1, dsc);
8001 else
8002 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8003 break;
8004 case 14: /* Unconditional branch */
8005 err = thumb_copy_b (gdbarch, insn1, dsc);
8006 break;
8007 default:
8008 err = 1;
8009 }
8010
8011 if (err)
8012 internal_error (__FILE__, __LINE__,
8013 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8014 }
8015
8016 static int
8017 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8018 uint16_t insn1, uint16_t insn2,
8019 struct regcache *regs,
8020 arm_displaced_step_copy_insn_closure *dsc)
8021 {
8022 int rt = bits (insn2, 12, 15);
8023 int rn = bits (insn1, 0, 3);
8024 int op1 = bits (insn1, 7, 8);
8025
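  /* Bits 6:5 of the first halfword select the access size: 0 is byte,
     1 is halfword, 2 is word; the remaining encoding is treated as
     undefined here.  */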
8026 switch (bits (insn1, 5, 6))
8027 {
8028 case 0: /* Load byte and memory hints */
8029 if (rt == 0xf) /* PLD/PLI */
8030 {
8031 if (rn == 0xf)
8032 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
8033 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8034 else
8035 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8036 "pli/pld", dsc);
8037 }
8038 else
8039 {
8040 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8041 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8042 1);
8043 else
8044 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8045 "ldrb{reg, immediate}/ldrbt",
8046 dsc);
8047 }
8048
8049 break;
8050 case 1: /* Load halfword and memory hints. */
8051 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8052 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8053 "pld/unalloc memhint", dsc);
8054 else
8055 {
8056 if (rn == 0xf)
8057 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8058 2);
8059 else
8060 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8061 "ldrh/ldrht", dsc);
8062 }
8063 break;
8064 case 2: /* Load word */
8065 {
8066 int insn2_bit_8_11 = bits (insn2, 8, 11);
8067
8068 if (rn == 0xf)
8069 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8070 else if (op1 == 0x1) /* Encoding T3 */
8071 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8072 0, 1);
8073 else /* op1 == 0x0 */
8074 {
8075 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8076 /* LDR (immediate) */
8077 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8078 dsc, bit (insn2, 8), 1);
8079 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8080 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8081 "ldrt", dsc);
8082 else
8083 /* LDR (register) */
8084 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8085 dsc, 0, 0);
8086 }
8087 break;
8088 }
8089 default:
8090 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8091 break;
8092 }
8093 return 0;
8094 }
8095
8096 static void
8097 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8098 uint16_t insn2, struct regcache *regs,
8099 arm_displaced_step_copy_insn_closure *dsc)
8100 {
8101 int err = 0;
8102 unsigned short op = bit (insn2, 15);
8103 unsigned int op1 = bits (insn1, 11, 12);
8104
8105 switch (op1)
8106 {
8107 case 1:
8108 {
8109 switch (bits (insn1, 9, 10))
8110 {
8111 case 0:
8112 if (bit (insn1, 6))
8113 {
8114 /* Load/store {dual, exclusive}, table branch. */
8115 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8116 && bits (insn2, 5, 7) == 0)
8117 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8118 dsc);
8119 else
8120 /* PC is not allowed to be used in load/store {dual, exclusive}
8121 instructions. */
8122 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8123 "load/store dual/ex", dsc);
8124 }
8125 else /* load/store multiple */
8126 {
8127 switch (bits (insn1, 7, 8))
8128 {
8129 case 0: case 3: /* SRS, RFE */
8130 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8131 "srs/rfe", dsc);
8132 break;
8133 case 1: case 2: /* LDM/STM/PUSH/POP */
8134 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8135 break;
8136 }
8137 }
8138 break;
8139
8140 case 1:
8141 /* Data-processing (shift register). */
8142 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8143 dsc);
8144 break;
8145 default: /* Coprocessor instructions. */
8146 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8147 break;
8148 }
8149 break;
8150 }
8151 case 2: /* op1 = 2 */
8152 if (op) /* Branch and misc control. */
8153 {
8154 if (bit (insn2, 14) /* BLX/BL */
8155 || bit (insn2, 12) /* Unconditional branch */
8156 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8157 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8158 else
8159 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8160 "misc ctrl", dsc);
8161 }
8162 else
8163 {
8164 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8165 {
8166 int dp_op = bits (insn1, 4, 8);
8167 int rn = bits (insn1, 0, 3);
8168 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8169 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8170 regs, dsc);
8171 else
8172 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8173 "dp/pb", dsc);
8174 }
8175 else /* Data processing (modified immediate) */
8176 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8177 "dp/mi", dsc);
8178 }
8179 break;
8180 case 3: /* op1 = 3 */
8181 switch (bits (insn1, 9, 10))
8182 {
8183 case 0:
8184 if (bit (insn1, 4))
8185 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8186 regs, dsc);
8187 else /* NEON Load/Store and Store single data item */
8188 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8189 "neon elt/struct load/store",
8190 dsc);
8191 break;
8192 case 1: /* op1 = 3, bits (9, 10) == 1 */
8193 switch (bits (insn1, 7, 8))
8194 {
8195 case 0: case 1: /* Data processing (register) */
8196 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8197 "dp(reg)", dsc);
8198 break;
8199 case 2: /* Multiply and absolute difference */
8200 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8201 "mul/mua/diff", dsc);
8202 break;
8203 case 3: /* Long multiply and divide */
8204 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8205 "lmul/lmua", dsc);
8206 break;
8207 }
8208 break;
8209 default: /* Coprocessor instructions */
8210 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8211 break;
8212 }
8213 break;
8214 default:
8215 err = 1;
8216 }
8217
8218 if (err)
8219 internal_error (__FILE__, __LINE__,
8220 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8221
8222 }
8223
8224 static void
8225 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8226 struct regcache *regs,
8227 arm_displaced_step_copy_insn_closure *dsc)
8228 {
8229 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8230 uint16_t insn1
8231 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8232
8233 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8234 insn1, (unsigned long) from);
8235
8236 dsc->is_thumb = 1;
8237 dsc->insn_size = thumb_insn_size (insn1);
8238 if (thumb_insn_size (insn1) == 4)
8239 {
8240 uint16_t insn2
8241 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8242 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8243 }
8244 else
8245 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8246 }
8247
8248 void
8249 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8250 CORE_ADDR to, struct regcache *regs,
8251 arm_displaced_step_copy_insn_closure *dsc)
8252 {
8253 int err = 0;
8254 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8255 uint32_t insn;
8256
8257 /* Most displaced instructions use a 1-instruction scratch space, so set this
8258 here and override below if/when necessary. */
8259 dsc->numinsns = 1;
8260 dsc->insn_addr = from;
8261 dsc->scratch_base = to;
8262 dsc->cleanup = NULL;
8263 dsc->wrote_to_pc = 0;
8264
8265 if (!displaced_in_arm_mode (regs))
8266 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8267
8268 dsc->is_thumb = 0;
8269 dsc->insn_size = 4;
8270 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8271 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8272 (unsigned long) insn, (unsigned long) from);
8273
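  /* Instructions with a condition field of 0b1111 live in the
     "unconditional" space and are decoded separately.  Otherwise the
     switch key below is bits 27-25 of the instruction in bits 3-1 and
     bit 4 in bit 0, which is enough to pick the major decode group.  */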
8274 if ((insn & 0xf0000000) == 0xf0000000)
8275 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8276 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8277 {
8278 case 0x0: case 0x1: case 0x2: case 0x3:
8279 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8280 break;
8281
8282 case 0x4: case 0x5: case 0x6:
8283 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8284 break;
8285
8286 case 0x7:
8287 err = arm_decode_media (gdbarch, insn, dsc);
8288 break;
8289
8290 case 0x8: case 0x9: case 0xa: case 0xb:
8291 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8292 break;
8293
8294 case 0xc: case 0xd: case 0xe: case 0xf:
8295 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8296 break;
8297 }
8298
8299 if (err)
8300 internal_error (__FILE__, __LINE__,
8301 _("arm_process_displaced_insn: Instruction decode error"));
8302 }
8303
8304 /* Actually set up the scratch space for a displaced instruction. */
8305
8306 void
8307 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8308 CORE_ADDR to,
8309 arm_displaced_step_copy_insn_closure *dsc)
8310 {
8311 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8312 unsigned int i, len, offset;
8313 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8314 int size = dsc->is_thumb? 2 : 4;
8315 const gdb_byte *bkp_insn;
8316
8317 offset = 0;
8318 /* Poke modified instruction(s). */
8319 for (i = 0; i < dsc->numinsns; i++)
8320 {
8321 if (size == 4)
8322 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8323 dsc->modinsn[i], (unsigned long) to + offset);
8324 else if (size == 2)
8325 displaced_debug_printf ("writing insn %.4x at %.8lx",
8326 (unsigned short) dsc->modinsn[i],
8327 (unsigned long) to + offset);
8328
8329 write_memory_unsigned_integer (to + offset, size,
8330 byte_order_for_code,
8331 dsc->modinsn[i]);
8332 offset += size;
8333 }
8334
8335 /* Choose the correct breakpoint instruction. */
8336 if (dsc->is_thumb)
8337 {
8338 bkp_insn = tdep->thumb_breakpoint;
8339 len = tdep->thumb_breakpoint_size;
8340 }
8341 else
8342 {
8343 bkp_insn = tdep->arm_breakpoint;
8344 len = tdep->arm_breakpoint_size;
8345 }
8346
8347 /* Put breakpoint afterwards. */
8348 write_memory (to + offset, bkp_insn, len);
8349
8350 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8351 paddress (gdbarch, to));
8352 }
8353
8354 /* Entry point for cleaning things up after a displaced instruction has been
8355 single-stepped. */
8356
8357 void
8358 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8359 struct displaced_step_copy_insn_closure *dsc_,
8360 CORE_ADDR from, CORE_ADDR to,
8361 struct regcache *regs)
8362 {
8363 arm_displaced_step_copy_insn_closure *dsc
8364 = (arm_displaced_step_copy_insn_closure *) dsc_;
8365
8366 if (dsc->cleanup)
8367 dsc->cleanup (gdbarch, regs, dsc);
8368
8369 if (!dsc->wrote_to_pc)
8370 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8371 dsc->insn_addr + dsc->insn_size);
8372
8373 }
8374
8375 #include "bfd-in2.h"
8376 #include "libcoff.h"
8377
8378 static int
8379 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8380 {
8381 gdb_disassemble_info *di
8382 = static_cast<gdb_disassemble_info *> (info->application_data);
8383 struct gdbarch *gdbarch = di->arch ();
8384
8385 if (arm_pc_is_thumb (gdbarch, memaddr))
8386 {
8387 static asymbol *asym;
8388 static combined_entry_type ce;
8389 static struct coff_symbol_struct csym;
8390 static struct bfd fake_bfd;
8391 static bfd_target fake_target;
8392
8393 if (csym.native == NULL)
8394 {
8395 /* Create a fake symbol vector containing a Thumb symbol.
8396 This is solely so that the code in print_insn_little_arm()
8397 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8398 the presence of a Thumb symbol and switch to decoding
8399 Thumb instructions. */
8400
8401 fake_target.flavour = bfd_target_coff_flavour;
8402 fake_bfd.xvec = &fake_target;
8403 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8404 csym.native = &ce;
8405 csym.symbol.the_bfd = &fake_bfd;
8406 csym.symbol.name = "fake";
8407 asym = (asymbol *) & csym;
8408 }
8409
8410 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8411 info->symbols = &asym;
8412 }
8413 else
8414 info->symbols = NULL;
8415
8416 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8417 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
8418 opcodes/arm-dis.c:print_insn resets info->mach, which triggers the
8419 assert on the mismatch between info->mach and
8420 bfd_get_mach (current_program_space->exec_bfd ()) in
8421 default_print_insn. */
8422 if (current_program_space->exec_bfd () != NULL
8423 && (current_program_space->exec_bfd ()->arch_info
8424 == gdbarch_bfd_arch_info (gdbarch)))
8425 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8426
8427 return default_print_insn (memaddr, info);
8428 }
8429
8430 /* The following define instruction sequences that will cause ARM
8431 cpu's to take an undefined instruction trap. These are used to
8432 signal a breakpoint to GDB.
8433
8434 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8435 modes. A different instruction is required for each mode. The ARM
8436 cpu's can also be big or little endian. Thus four different
8437 instructions are needed to support all cases.
8438
8439 Note: ARMv4 defines several new instructions that will take the
8440 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8441 not in fact add the new instructions. The new undefined
8442 instructions in ARMv4 are all instructions that had no defined
8443 behaviour in earlier chips. There is no guarantee that they will
8444 raise an exception, but may be treated as NOP's. In practice, it
8445 may only safe to rely on instructions matching:
8446
8447 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8448 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8449 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8450
8451 Even this may only be true if the condition predicate is true. The
8452 following use a condition predicate of ALWAYS so it is always TRUE.
8453
8454 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8455 and NetBSD all use a software interrupt rather than an undefined
8456 instruction to force a trap. This can be handled by the
8457 ABI-specific code during establishment of the gdbarch vector. */
8458
8459 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8460 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8461 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8462 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
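/* For example, the little-endian ARM pattern above is the word 0xe7ffdefe,
   which falls in the architecturally "permanently undefined" (UDF) space
   and matches the CCCC 011x ... 1 xxxx form described above with an
   ALWAYS condition.  */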
8463
8464 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8465 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8466 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8467 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8468
8469 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8470
8471 static int
8472 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8473 {
8474 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8475 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8476
8477 if (arm_pc_is_thumb (gdbarch, *pcptr))
8478 {
8479 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8480
8481 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8482 check whether we are replacing a 32-bit instruction. */
8483 if (tdep->thumb2_breakpoint != NULL)
8484 {
8485 gdb_byte buf[2];
8486
8487 if (target_read_memory (*pcptr, buf, 2) == 0)
8488 {
8489 unsigned short inst1;
8490
8491 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8492 if (thumb_insn_size (inst1) == 4)
8493 return ARM_BP_KIND_THUMB2;
8494 }
8495 }
8496
8497 return ARM_BP_KIND_THUMB;
8498 }
8499 else
8500 return ARM_BP_KIND_ARM;
8501
8502 }
8503
8504 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8505
8506 static const gdb_byte *
8507 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8508 {
8509 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8510
8511 switch (kind)
8512 {
8513 case ARM_BP_KIND_ARM:
8514 *size = tdep->arm_breakpoint_size;
8515 return tdep->arm_breakpoint;
8516 case ARM_BP_KIND_THUMB:
8517 *size = tdep->thumb_breakpoint_size;
8518 return tdep->thumb_breakpoint;
8519 case ARM_BP_KIND_THUMB2:
8520 *size = tdep->thumb2_breakpoint_size;
8521 return tdep->thumb2_breakpoint;
8522 default:
8523 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8524 }
8525 }
8526
8527 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8528
8529 static int
8530 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8531 struct regcache *regcache,
8532 CORE_ADDR *pcptr)
8533 {
8534 gdb_byte buf[4];
8535
8536 /* Check that the memory pointed to by PC is readable. */
8537 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8538 {
8539 struct arm_get_next_pcs next_pcs_ctx;
8540
8541 arm_get_next_pcs_ctor (&next_pcs_ctx,
8542 &arm_get_next_pcs_ops,
8543 gdbarch_byte_order (gdbarch),
8544 gdbarch_byte_order_for_code (gdbarch),
8545 0,
8546 regcache);
8547
8548 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8549
8550 /* If *PCPTR matches one of the next PCs computed by the software
8551 single-step logic, use the Thumb bit of that destination address
8552 to choose the breakpoint kind. */
8553 for (CORE_ADDR pc : next_pcs)
8554 {
8555 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8556 {
8557 if (IS_THUMB_ADDR (pc))
8558 {
8559 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8560 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8561 }
8562 else
8563 return ARM_BP_KIND_ARM;
8564 }
8565 }
8566 }
8567
8568 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8569 }
8570
8571 /* Extract from the register cache REGS a function return value of
8572 type TYPE, and copy that, in virtual format, into
8573 VALBUF. */
8574
8575 static void
8576 arm_extract_return_value (struct type *type, struct regcache *regs,
8577 gdb_byte *valbuf)
8578 {
8579 struct gdbarch *gdbarch = regs->arch ();
8580 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8581 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8582
8583 if (TYPE_CODE_FLT == type->code ())
8584 {
8585 switch (tdep->fp_model)
8586 {
8587 case ARM_FLOAT_FPA:
8588 {
8589 /* The value is in register F0 in internal format. We need to
8590 extract the raw value and then convert it to the desired
8591 internal type. */
8592 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8593
8594 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8595 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8596 valbuf, type);
8597 }
8598 break;
8599
8600 case ARM_FLOAT_SOFT_FPA:
8601 case ARM_FLOAT_SOFT_VFP:
8602 /* ARM_FLOAT_VFP can arise if this is a variadic function, which
8603 therefore is not using the VFP ABI code. */
8604 case ARM_FLOAT_VFP:
8605 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8606 if (TYPE_LENGTH (type) > 4)
8607 regs->cooked_read (ARM_A1_REGNUM + 1,
8608 valbuf + ARM_INT_REGISTER_SIZE);
8609 break;
8610
8611 default:
8612 internal_error (__FILE__, __LINE__,
8613 _("arm_extract_return_value: "
8614 "Floating point model not supported"));
8615 break;
8616 }
8617 }
8618 else if (type->code () == TYPE_CODE_INT
8619 || type->code () == TYPE_CODE_CHAR
8620 || type->code () == TYPE_CODE_BOOL
8621 || type->code () == TYPE_CODE_PTR
8622 || TYPE_IS_REFERENCE (type)
8623 || type->code () == TYPE_CODE_ENUM
8624 || is_fixed_point_type (type))
8625 {
8626 /* If the type is a plain integer, then the access is
8627 straightforward. Otherwise we have to play around a bit
8628 more. */
8629 int len = TYPE_LENGTH (type);
8630 int regno = ARM_A1_REGNUM;
8631 ULONGEST tmp;
8632
8633 while (len > 0)
8634 {
8635 /* By using store_unsigned_integer we avoid having to do
8636 anything special for small big-endian values. */
8637 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8638 store_unsigned_integer (valbuf,
8639 (len > ARM_INT_REGISTER_SIZE
8640 ? ARM_INT_REGISTER_SIZE : len),
8641 byte_order, tmp);
8642 len -= ARM_INT_REGISTER_SIZE;
8643 valbuf += ARM_INT_REGISTER_SIZE;
8644 }
8645 }
8646 else
8647 {
8648 /* For a structure or union the behaviour is as if the value had
8649 been stored to word-aligned memory and then loaded into
8650 registers with 32-bit load instruction(s). */
8651 int len = TYPE_LENGTH (type);
8652 int regno = ARM_A1_REGNUM;
8653 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8654
8655 while (len > 0)
8656 {
8657 regs->cooked_read (regno++, tmpbuf);
8658 memcpy (valbuf, tmpbuf,
8659 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8660 len -= ARM_INT_REGISTER_SIZE;
8661 valbuf += ARM_INT_REGISTER_SIZE;
8662 }
8663 }
8664 }
8665
8666
8667 /* Will a function return an aggregate type in memory or in a
8668 register? Return 0 if an aggregate type can be returned in a
8669 register, 1 if it must be returned in memory. */
8670
8671 static int
8672 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8673 {
8674 enum type_code code;
8675
8676 type = check_typedef (type);
8677
8678 /* Simple, non-aggregate types (i.e. not including vectors and
8679 complex) are always returned in a register (or registers). */
8680 code = type->code ();
8681 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8682 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8683 return 0;
8684
8685 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8686 {
8687 /* Vector values should be returned using ARM registers if they
8688 are not over 16 bytes. */
8689 return (TYPE_LENGTH (type) > 16);
8690 }
8691
8692 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8693 if (tdep->arm_abi != ARM_ABI_APCS)
8694 {
8695 /* The AAPCS says all aggregates not larger than a word are returned
8696 in a register. */
8697 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE
8698 && language_pass_by_reference (type).trivially_copyable)
8699 return 0;
8700
8701 return 1;
8702 }
8703 else
8704 {
8705 int nRc;
8706
8707 /* All aggregate types that won't fit in a register must be returned
8708 in memory. */
8709 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE
8710 || !language_pass_by_reference (type).trivially_copyable)
8711 return 1;
8712
8713 /* In the ARM ABI, "integer" like aggregate types are returned in
8714 registers. For an aggregate type to be integer like, its size
8715 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8716 offset of each addressable subfield must be zero. Note that bit
8717 fields are not addressable, and all addressable subfields of
8718 unions always start at offset zero.
8719
8720 This function is based on the behaviour of GCC 2.95.1.
8721 See: gcc/arm.c: arm_return_in_memory() for details.
8722
8723 Note: All versions of GCC before GCC 2.95.2 do not set up the
8724 parameters correctly for a function returning the following
8725 structure: struct { float f;}; This should be returned in memory,
8726 not a register. Richard Earnshaw sent me a patch, but I do not
8727 know of any way to detect if a function like the above has been
8728 compiled with the correct calling convention. */
8729
8730 /* Assume all other aggregate types can be returned in a register.
8731 Run a check for structures, unions and arrays. */
8732 nRc = 0;
8733
8734 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8735 {
8736 int i;
8737 /* Need to check if this struct/union is "integer" like. For
8738 this to be true, its size must be less than or equal to
8739 ARM_INT_REGISTER_SIZE and the offset of each addressable
8740 subfield must be zero. Note that bit fields are not
8741 addressable, and unions always start at offset zero. If any
8742 of the subfields is a floating point type, the struct/union
8743 cannot be an integer type. */
8744
8745 /* For each field in the object, check:
8746 1) Is it FP? --> yes, nRc = 1;
8747 2) Is it addressable (bitpos != 0) and
8748 not packed (bitsize == 0)?
8749 --> yes, nRc = 1
8750 */
8751
8752 for (i = 0; i < type->num_fields (); i++)
8753 {
8754 enum type_code field_type_code;
8755
8756 field_type_code
8757 = check_typedef (type->field (i).type ())->code ();
8758
8759 /* Is it a floating point type field? */
8760 if (field_type_code == TYPE_CODE_FLT)
8761 {
8762 nRc = 1;
8763 break;
8764 }
8765
8766 /* If bitpos != 0, then we have to care about it. */
8767 if (type->field (i).loc_bitpos () != 0)
8768 {
8769 /* Bitfields are not addressable. If the field bitsize is
8770 zero, then the field is not packed. Hence it cannot be
8771 a bitfield or any other packed type. */
8772 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8773 {
8774 nRc = 1;
8775 break;
8776 }
8777 }
8778 }
8779 }
8780
8781 return nRc;
8782 }
8783 }
8784
8785 /* Write into appropriate registers a function return value of type
8786 TYPE, given in virtual format. */
8787
8788 static void
8789 arm_store_return_value (struct type *type, struct regcache *regs,
8790 const gdb_byte *valbuf)
8791 {
8792 struct gdbarch *gdbarch = regs->arch ();
8793 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8794
8795 if (type->code () == TYPE_CODE_FLT)
8796 {
8797 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8798 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8799
8800 switch (tdep->fp_model)
8801 {
8802 case ARM_FLOAT_FPA:
8803
8804 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8805 regs->cooked_write (ARM_F0_REGNUM, buf);
8806 break;
8807
8808 case ARM_FLOAT_SOFT_FPA:
8809 case ARM_FLOAT_SOFT_VFP:
8810 /* ARM_FLOAT_VFP can arise if this is a variadic function, which
8811 therefore is not using the VFP ABI code. */
8812 case ARM_FLOAT_VFP:
8813 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8814 if (TYPE_LENGTH (type) > 4)
8815 regs->cooked_write (ARM_A1_REGNUM + 1,
8816 valbuf + ARM_INT_REGISTER_SIZE);
8817 break;
8818
8819 default:
8820 internal_error (__FILE__, __LINE__,
8821 _("arm_store_return_value: Floating "
8822 "point model not supported"));
8823 break;
8824 }
8825 }
8826 else if (type->code () == TYPE_CODE_INT
8827 || type->code () == TYPE_CODE_CHAR
8828 || type->code () == TYPE_CODE_BOOL
8829 || type->code () == TYPE_CODE_PTR
8830 || TYPE_IS_REFERENCE (type)
8831 || type->code () == TYPE_CODE_ENUM)
8832 {
8833 if (TYPE_LENGTH (type) <= 4)
8834 {
8835 /* Values of one word or less are zero/sign-extended and
8836 returned in r0. */
8837 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8838 LONGEST val = unpack_long (type, valbuf);
8839
8840 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8841 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8842 }
8843 else
8844 {
8845 /* Integral values greater than one word are stored in consecutive
8846 registers starting with r0. This will always be a multiple of
8847 the register size. */
8848 int len = TYPE_LENGTH (type);
8849 int regno = ARM_A1_REGNUM;
8850
8851 while (len > 0)
8852 {
8853 regs->cooked_write (regno++, valbuf);
8854 len -= ARM_INT_REGISTER_SIZE;
8855 valbuf += ARM_INT_REGISTER_SIZE;
8856 }
8857 }
8858 }
8859 else
8860 {
8861 /* For a structure or union the behaviour is as if the value had
8862 been stored to word-aligned memory and then loaded into
8863 registers with 32-bit load instruction(s). */
8864 int len = TYPE_LENGTH (type);
8865 int regno = ARM_A1_REGNUM;
8866 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8867
8868 while (len > 0)
8869 {
8870 memcpy (tmpbuf, valbuf,
8871 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8872 regs->cooked_write (regno++, tmpbuf);
8873 len -= ARM_INT_REGISTER_SIZE;
8874 valbuf += ARM_INT_REGISTER_SIZE;
8875 }
8876 }
8877 }
8878
8879
8880 /* Handle function return values. */
8881
8882 static enum return_value_convention
8883 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8884 struct type *valtype, struct regcache *regcache,
8885 gdb_byte *readbuf, const gdb_byte *writebuf)
8886 {
8887 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8888 struct type *func_type = function ? value_type (function) : NULL;
8889 enum arm_vfp_cprc_base_type vfp_base_type;
8890 int vfp_base_count;
8891
8892 if (arm_vfp_abi_for_function (gdbarch, func_type)
8893 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8894 {
8895 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8896 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8897 int i;
8898 for (i = 0; i < vfp_base_count; i++)
8899 {
8900 if (reg_char == 'q')
8901 {
8902 if (writebuf)
8903 arm_neon_quad_write (gdbarch, regcache, i,
8904 writebuf + i * unit_length);
8905
8906 if (readbuf)
8907 arm_neon_quad_read (gdbarch, regcache, i,
8908 readbuf + i * unit_length);
8909 }
8910 else
8911 {
8912 char name_buf[4];
8913 int regnum;
8914
8915 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8916 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8917 strlen (name_buf));
8918 if (writebuf)
8919 regcache->cooked_write (regnum, writebuf + i * unit_length);
8920 if (readbuf)
8921 regcache->cooked_read (regnum, readbuf + i * unit_length);
8922 }
8923 }
8924 return RETURN_VALUE_REGISTER_CONVENTION;
8925 }
8926
8927 if (valtype->code () == TYPE_CODE_STRUCT
8928 || valtype->code () == TYPE_CODE_UNION
8929 || valtype->code () == TYPE_CODE_ARRAY)
8930 {
8931 /* From the AAPCS document:
8932
8933 Result return:
8934
8935 A Composite Type larger than 4 bytes, or whose size cannot be
8936 determined statically by both caller and callee, is stored in memory
8937 at an address passed as an extra argument when the function was
8938 called (Parameter Passing, rule A.4). The memory to be used for the
8939 result may be modified at any point during the function call.
8940
8941 Parameter Passing:
8942
8943 A.4: If the subroutine is a function that returns a result in memory,
8944 then the address for the result is placed in r0 and the NCRN is set
8945 to r1. */
8946 if (tdep->struct_return == pcc_struct_return
8947 || arm_return_in_memory (gdbarch, valtype))
8948 {
8949 if (readbuf)
8950 {
8951 CORE_ADDR addr;
8952
8953 regcache->cooked_read (ARM_A1_REGNUM, &addr);
8954 read_memory (addr, readbuf, TYPE_LENGTH (valtype));
8955 }
8956 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
8957 }
8958 }
8959 else if (valtype->code () == TYPE_CODE_COMPLEX)
8960 {
8961 if (arm_return_in_memory (gdbarch, valtype))
8962 return RETURN_VALUE_STRUCT_CONVENTION;
8963 }
8964
8965 if (writebuf)
8966 arm_store_return_value (valtype, regcache, writebuf);
8967
8968 if (readbuf)
8969 arm_extract_return_value (valtype, regcache, readbuf);
8970
8971 return RETURN_VALUE_REGISTER_CONVENTION;
8972 }
8973
8974
8975 static int
8976 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8977 {
8978 struct gdbarch *gdbarch = get_frame_arch (frame);
8979 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8980 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8981 CORE_ADDR jb_addr;
8982 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8983
8984 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8985
8986 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8987 ARM_INT_REGISTER_SIZE))
8988 return 0;
8989
8990 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8991 return 1;
8992 }
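
/* Editor's note: an illustrative sketch only, not part of GDB.  It shows how
   arm_get_longjmp_target above locates and decodes the saved PC inside a
   jmp_buf: the PC lives in slot jb_pc of the buffer, each slot being
   jb_elt_size bytes wide, and the 4-byte value is decoded according to the
   target byte order (little-endian shown here).  The example_* names are
   hypothetical.  */
#if 0
#include <cstdint>

static std::uint64_t
example_jmp_buf_pc_slot (std::uint64_t jb_addr, int jb_pc, int jb_elt_size)
{
  return jb_addr + (std::uint64_t) jb_pc * (std::uint64_t) jb_elt_size;
}

static std::uint32_t
example_decode_le32 (const unsigned char *buf)
{
  return (std::uint32_t) buf[0]
	 | ((std::uint32_t) buf[1] << 8)
	 | ((std::uint32_t) buf[2] << 16)
	 | ((std::uint32_t) buf[3] << 24);
}
#endif
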
8993 /* A call to a cmse secure entry function "foo", as at "a", is rewritten
8994    by GNU ld into the form at "b".
8995 a) bl xxxx <foo>
8996
8997 <foo>
8998 xxxx:
8999
9000 b) bl yyyy <__acle_se_foo>
9001
9002 section .gnu.sgstubs:
9003 <foo>
9004 yyyy: sg // secure gateway
9005 b.w xxxx <__acle_se_foo> // original_branch_dest
9006
9007 <__acle_se_foo>
9008 xxxx:
9009
9010    When control is at "b", the pc contains "yyyy" (the sg address), which is a
9011    trampoline and does not exist in the source code.  This function returns the
9012 target pc "xxxx". For more details please refer to section 5.4
9013 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9014 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9015 document on www.developer.arm.com. */
9016
9017 static CORE_ADDR
9018 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9019 {
9020 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9021 char *target_name = (char *) alloca (target_len);
9022 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9023
9024 struct bound_minimal_symbol minsym
9025 = lookup_minimal_symbol (target_name, NULL, objfile);
9026
9027 if (minsym.minsym != nullptr)
9028 return minsym.value_address ();
9029
9030 return 0;
9031 }
9032
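/* Editor's note: an illustrative sketch only, not part of GDB.  It shows the
   name transformation performed by arm_skip_cmse_entry above: for a secure
   entry function "foo", the symbol looked up is "__acle_se_foo".  The
   example_* name is hypothetical.  */
#if 0
#include <cstdio>
#include <cstddef>

static void
example_acle_se_name (const char *name, char *out, std::size_t out_size)
{
  /* e.g. name == "foo" yields "__acle_se_foo" in OUT.  */
  std::snprintf (out, out_size, "__acle_se_%s", name);
}
#endif
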
9033 /* Return true when SEC points to ".gnu.sgstubs" section. */
9034
9035 static bool
9036 arm_is_sgstubs_section (struct obj_section *sec)
9037 {
9038 return (sec != nullptr
9039 && sec->the_bfd_section != nullptr
9040 && sec->the_bfd_section->name != nullptr
9041 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9042 }
9043
9044 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9045 return the target PC. Otherwise return 0. */
9046
9047 CORE_ADDR
9048 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9049 {
9050 const char *name;
9051 int namelen;
9052 CORE_ADDR start_addr;
9053
9054 /* Find the starting address and name of the function containing the PC. */
9055 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9056 {
9057       /* Trampoline 'bx reg' doesn't belong to any function.  Do the
9058 check here. */
9059 start_addr = arm_skip_bx_reg (frame, pc);
9060 if (start_addr != 0)
9061 return start_addr;
9062
9063 return 0;
9064 }
9065
9066 /* If PC is in a Thumb call or return stub, return the address of the
9067 target PC, which is in a register. The thunk functions are called
9068 _call_via_xx, where x is the register name. The possible names
9069 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9070 functions, named __ARM_call_via_r[0-7]. */
9071 if (startswith (name, "_call_via_")
9072 || startswith (name, "__ARM_call_via_"))
9073 {
9074 /* Use the name suffix to determine which register contains the
9075 target PC. */
9076 static const char *table[15] =
9077 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9078 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9079 };
9080 int regno;
9081 int offset = strlen (name) - 2;
9082
9083 for (regno = 0; regno <= 14; regno++)
9084 if (strcmp (&name[offset], table[regno]) == 0)
9085 return get_frame_register_unsigned (frame, regno);
9086 }
9087
9088 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9089 non-interworking calls to foo. We could decode the stubs
9090 to find the target but it's easier to use the symbol table. */
9091 namelen = strlen (name);
9092 if (name[0] == '_' && name[1] == '_'
9093 && ((namelen > 2 + strlen ("_from_thumb")
9094 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9095 || (namelen > 2 + strlen ("_from_arm")
9096 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9097 {
9098 char *target_name;
9099 int target_len = namelen - 2;
9100 struct bound_minimal_symbol minsym;
9101 struct objfile *objfile;
9102 struct obj_section *sec;
9103
9104 if (name[namelen - 1] == 'b')
9105 target_len -= strlen ("_from_thumb");
9106 else
9107 target_len -= strlen ("_from_arm");
9108
9109 target_name = (char *) alloca (target_len + 1);
9110 memcpy (target_name, name + 2, target_len);
9111 target_name[target_len] = '\0';
9112
9113 sec = find_pc_section (pc);
9114 objfile = (sec == NULL) ? NULL : sec->objfile;
9115 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9116 if (minsym.minsym != NULL)
9117 return minsym.value_address ();
9118 else
9119 return 0;
9120 }
9121
9122 struct obj_section *section = find_pc_section (pc);
9123
9124 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9125 if (arm_is_sgstubs_section (section))
9126 return arm_skip_cmse_entry (pc, name, section->objfile);
9127
9128 return 0; /* not a stub */
9129 }
9130
9131 static void
9132 arm_update_current_architecture (void)
9133 {
9134 /* If the current architecture is not ARM, we have nothing to do. */
9135 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9136 return;
9137
9138 /* Update the architecture. */
9139 gdbarch_info info;
9140 if (!gdbarch_update_p (info))
9141 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9142 }
9143
9144 static void
9145 set_fp_model_sfunc (const char *args, int from_tty,
9146 struct cmd_list_element *c)
9147 {
9148 int fp_model;
9149
9150 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9151 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9152 {
9153 arm_fp_model = (enum arm_float_model) fp_model;
9154 break;
9155 }
9156
9157 if (fp_model == ARM_FLOAT_LAST)
9158 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9159 current_fp_model);
9160
9161 arm_update_current_architecture ();
9162 }
9163
9164 static void
9165 show_fp_model (struct ui_file *file, int from_tty,
9166 struct cmd_list_element *c, const char *value)
9167 {
9168 arm_gdbarch_tdep *tdep
9169 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
9170
9171 if (arm_fp_model == ARM_FLOAT_AUTO
9172 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9173 gdb_printf (file, _("\
9174 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9175 fp_model_strings[tdep->fp_model]);
9176 else
9177 gdb_printf (file, _("\
9178 The current ARM floating point model is \"%s\".\n"),
9179 fp_model_strings[arm_fp_model]);
9180 }
9181
9182 static void
9183 arm_set_abi (const char *args, int from_tty,
9184 struct cmd_list_element *c)
9185 {
9186 int arm_abi;
9187
9188 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9189 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9190 {
9191 arm_abi_global = (enum arm_abi_kind) arm_abi;
9192 break;
9193 }
9194
9195 if (arm_abi == ARM_ABI_LAST)
9196 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9197 arm_abi_string);
9198
9199 arm_update_current_architecture ();
9200 }
9201
9202 static void
9203 arm_show_abi (struct ui_file *file, int from_tty,
9204 struct cmd_list_element *c, const char *value)
9205 {
9206 arm_gdbarch_tdep *tdep
9207 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
9208
9209 if (arm_abi_global == ARM_ABI_AUTO
9210 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9211 gdb_printf (file, _("\
9212 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9213 arm_abi_strings[tdep->arm_abi]);
9214 else
9215 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9216 arm_abi_string);
9217 }
9218
9219 static void
9220 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9221 struct cmd_list_element *c, const char *value)
9222 {
9223 gdb_printf (file,
9224 _("The current execution mode assumed "
9225 "(when symbols are unavailable) is \"%s\".\n"),
9226 arm_fallback_mode_string);
9227 }
9228
9229 static void
9230 arm_show_force_mode (struct ui_file *file, int from_tty,
9231 struct cmd_list_element *c, const char *value)
9232 {
9233 gdb_printf (file,
9234 _("The current execution mode assumed "
9235 "(even when symbols are available) is \"%s\".\n"),
9236 arm_force_mode_string);
9237 }
9238
9239 static void
9240 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9241 struct cmd_list_element *c, const char *value)
9242 {
9243 gdb_printf (file,
9244 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9245 arm_unwind_secure_frames ? "on" : "off");
9246 }
9247
9248 /* If the user changes the register disassembly style used for info
9249 register and other commands, we have to also switch the style used
9250 in opcodes for disassembly output. This function is run in the "set
9251 arm disassembly" command, and does that. */
9252
9253 static void
9254 set_disassembly_style_sfunc (const char *args, int from_tty,
9255 struct cmd_list_element *c)
9256 {
9257   /* Convert the short style name into the long style name (e.g. reg-names-*)
9258 before calling the generic set_disassembler_options() function. */
9259 std::string long_name = std::string ("reg-names-") + disassembly_style;
9260 set_disassembler_options (&long_name[0]);
9261 }
9262
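/* Editor's note: an illustrative sketch only, not part of GDB.  It shows the
   mapping performed by set_disassembly_style_sfunc above: the short style
   name the user types is turned into the long disassembler option by
   prefixing "reg-names-".  The example_* name is hypothetical.  */
#if 0
#include <string>

static std::string
example_long_style_name (const char *style)
{
  /* e.g. "apcs" -> "reg-names-apcs".  */
  return std::string ("reg-names-") + style;
}
#endif
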
9263 static void
9264 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9265 struct cmd_list_element *c, const char *value)
9266 {
9267 struct gdbarch *gdbarch = get_current_arch ();
9268 char *options = get_disassembler_options (gdbarch);
9269 const char *style = "";
9270 int len = 0;
9271 const char *opt;
9272
9273 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9274 if (startswith (opt, "reg-names-"))
9275 {
9276 style = &opt[strlen ("reg-names-")];
9277 len = strcspn (style, ",");
9278 }
9279
9280 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9281 }
9282 \f
9283 /* Return the ARM register name corresponding to register I. */
9284 static const char *
9285 arm_register_name (struct gdbarch *gdbarch, int i)
9286 {
9287 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9288
9289 if (is_s_pseudo (gdbarch, i))
9290 {
9291 static const char *const s_pseudo_names[] = {
9292 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9293 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9294 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9295 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9296 };
9297
9298 return s_pseudo_names[i - tdep->s_pseudo_base];
9299 }
9300
9301 if (is_q_pseudo (gdbarch, i))
9302 {
9303 static const char *const q_pseudo_names[] = {
9304 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9305 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9306 };
9307
9308 return q_pseudo_names[i - tdep->q_pseudo_base];
9309 }
9310
9311 if (is_mve_pseudo (gdbarch, i))
9312 return "p0";
9313
9314 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9315 if (is_pacbti_pseudo (gdbarch, i))
9316 return "";
9317
9318 if (i >= ARRAY_SIZE (arm_register_names))
9319 /* These registers are only supported on targets which supply
9320 an XML description. */
9321 return "";
9322
9323 /* Non-pseudo registers. */
9324 return arm_register_names[i];
9325 }
9326
9327 /* Test whether the coff symbol specific value corresponds to a Thumb
9328 function. */
9329
9330 static int
9331 coff_sym_is_thumb (int val)
9332 {
9333 return (val == C_THUMBEXT
9334 || val == C_THUMBSTAT
9335 || val == C_THUMBEXTFUNC
9336 || val == C_THUMBSTATFUNC
9337 || val == C_THUMBLABEL);
9338 }
9339
9340 /* arm_coff_make_msymbol_special()
9341 arm_elf_make_msymbol_special()
9342
9343 These functions test whether the COFF or ELF symbol corresponds to
9344 an address in thumb code, and set a "special" bit in a minimal
9345 symbol to indicate that it does. */
9346
9347 static void
9348 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9349 {
9350 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9351
9352 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9353 == ST_BRANCH_TO_THUMB)
9354 MSYMBOL_SET_SPECIAL (msym);
9355 }
9356
9357 static void
9358 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9359 {
9360 if (coff_sym_is_thumb (val))
9361 MSYMBOL_SET_SPECIAL (msym);
9362 }
9363
9364 static void
9365 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9366 asymbol *sym)
9367 {
9368 const char *name = bfd_asymbol_name (sym);
9369 struct arm_per_bfd *data;
9370 struct arm_mapping_symbol new_map_sym;
9371
9372 gdb_assert (name[0] == '$');
9373 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9374 return;
9375
9376 data = arm_bfd_data_key.get (objfile->obfd);
9377 if (data == NULL)
9378 data = arm_bfd_data_key.emplace (objfile->obfd,
9379 objfile->obfd->section_count);
9380 arm_mapping_symbol_vec &map
9381 = data->section_maps[bfd_asymbol_section (sym)->index];
9382
9383 new_map_sym.value = sym->value;
9384 new_map_sym.type = name[1];
9385
9386 /* Insert at the end, the vector will be sorted on first use. */
9387 map.push_back (new_map_sym);
9388 }
9389
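/* Editor's note: an illustrative sketch only, not part of GDB.  It models the
   pattern used by arm_record_special_symbol above: mapping symbols are
   appended unsorted, and the per-section vector is sorted lazily on first
   use.  The example_* names are hypothetical simplifications of the real
   arm_mapping_symbol machinery.  */
#if 0
#include <algorithm>
#include <vector>

struct example_map_sym
{
  unsigned long value;		/* Symbol address.  */
  char type;			/* 'a', 't' or 'd'.  */

  bool operator< (const example_map_sym &other) const
  { return value < other.value; }
};

static void
example_record (std::vector<example_map_sym> &map,
		unsigned long addr, char type)
{
  /* Insert at the end; sorting is deferred until the first lookup.  */
  map.push_back ({addr, type});
}

static void
example_sort_on_first_use (std::vector<example_map_sym> &map, bool &sorted)
{
  if (!sorted)
    {
      std::sort (map.begin (), map.end ());
      sorted = true;
    }
}
#endif
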
9390 static void
9391 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9392 {
9393 struct gdbarch *gdbarch = regcache->arch ();
9394 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9395
9396 /* If necessary, set the T bit. */
9397 if (arm_apcs_32)
9398 {
9399 ULONGEST val, t_bit;
9400 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9401 t_bit = arm_psr_thumb_bit (gdbarch);
9402 if (arm_pc_is_thumb (gdbarch, pc))
9403 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9404 val | t_bit);
9405 else
9406 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9407 val & ~t_bit);
9408 }
9409 }
9410
9411 /* Read the contents of a NEON quad register, by reading from two
9412 double registers. This is used to implement the quad pseudo
9413 registers, and for argument passing in case the quad registers are
9414 missing; vectors are passed in quad registers when using the VFP
9415 ABI, even if a NEON unit is not present. REGNUM is the index of
9416 the quad register, in [0, 15]. */
9417
9418 static enum register_status
9419 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9420 int regnum, gdb_byte *buf)
9421 {
9422 char name_buf[4];
9423 gdb_byte reg_buf[8];
9424 int offset, double_regnum;
9425 enum register_status status;
9426
9427 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9428 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9429 strlen (name_buf));
9430
9431 /* d0 is always the least significant half of q0. */
9432 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9433 offset = 8;
9434 else
9435 offset = 0;
9436
9437 status = regcache->raw_read (double_regnum, reg_buf);
9438 if (status != REG_VALID)
9439 return status;
9440 memcpy (buf + offset, reg_buf, 8);
9441
9442 offset = 8 - offset;
9443 status = regcache->raw_read (double_regnum + 1, reg_buf);
9444 if (status != REG_VALID)
9445 return status;
9446 memcpy (buf + offset, reg_buf, 8);
9447
9448 return REG_VALID;
9449 }
9450
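/* Editor's note: an illustrative sketch only, not part of GDB.  It shows the
   byte layout used by arm_neon_quad_read above: on a little-endian target
   d(2N) supplies bytes 0..7 of q(N) and d(2N+1) supplies bytes 8..15; on a
   big-endian target the two halves are swapped.  The example_* name is
   hypothetical.  */
#if 0
#include <cstring>

static void
example_assemble_quad (unsigned char *quad /* 16 bytes */,
		       const unsigned char *d_even /* 8 bytes */,
		       const unsigned char *d_odd /* 8 bytes */,
		       bool big_endian)
{
  int offset = big_endian ? 8 : 0;

  std::memcpy (quad + offset, d_even, 8);
  std::memcpy (quad + (8 - offset), d_odd, 8);
}
#endif
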
9451 /* Read the contents of the MVE pseudo register REGNUM and store it
9452 in BUF. */
9453
9454 static enum register_status
9455 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9456 int regnum, gdb_byte *buf)
9457 {
9458 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9459
9460 /* P0 is the first 16 bits of VPR. */
9461 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9462 }
9463
9464 static enum register_status
9465 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9466 int regnum, gdb_byte *buf)
9467 {
9468 const int num_regs = gdbarch_num_regs (gdbarch);
9469 char name_buf[4];
9470 gdb_byte reg_buf[8];
9471 int offset, double_regnum;
9472 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9473
9474 gdb_assert (regnum >= num_regs);
9475
9476 if (is_q_pseudo (gdbarch, regnum))
9477 {
9478 /* Quad-precision register. */
9479 return arm_neon_quad_read (gdbarch, regcache,
9480 regnum - tdep->q_pseudo_base, buf);
9481 }
9482 else if (is_mve_pseudo (gdbarch, regnum))
9483 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9484 else
9485 {
9486 enum register_status status;
9487
9488 regnum -= tdep->s_pseudo_base;
9489 /* Single-precision register. */
9490 gdb_assert (regnum < 32);
9491
9492 /* s0 is always the least significant half of d0. */
9493 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9494 offset = (regnum & 1) ? 0 : 4;
9495 else
9496 offset = (regnum & 1) ? 4 : 0;
9497
9498 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9499 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9500 strlen (name_buf));
9501
9502 status = regcache->raw_read (double_regnum, reg_buf);
9503 if (status == REG_VALID)
9504 memcpy (buf, reg_buf + offset, 4);
9505 return status;
9506 }
9507 }
9508
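/* Editor's note: an illustrative sketch only, not part of GDB.  It shows
   where an sN pseudo register lives according to arm_pseudo_read above:
   sN is one half of d(N/2), and which half depends on the target byte
   order; writing an sN pseudo is therefore a read-modify-write of the
   containing double register.  The example_* name is hypothetical.  */
#if 0
static void
example_s_pseudo_location (int s_regnum, bool big_endian,
			   int *d_regnum, int *byte_offset)
{
  *d_regnum = s_regnum >> 1;	/* s0,s1 -> d0; s2,s3 -> d1; ...  */
  if (big_endian)
    *byte_offset = (s_regnum & 1) ? 0 : 4;
  else
    *byte_offset = (s_regnum & 1) ? 4 : 0;
}
#endif
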
9509 /* Store the contents of BUF to a NEON quad register, by writing to
9510 two double registers. This is used to implement the quad pseudo
9511 registers, and for argument passing in case the quad registers are
9512 missing; vectors are passed in quad registers when using the VFP
9513 ABI, even if a NEON unit is not present. REGNUM is the index
9514 of the quad register, in [0, 15]. */
9515
9516 static void
9517 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9518 int regnum, const gdb_byte *buf)
9519 {
9520 char name_buf[4];
9521 int offset, double_regnum;
9522
9523 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9524 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9525 strlen (name_buf));
9526
9527 /* d0 is always the least significant half of q0. */
9528 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9529 offset = 8;
9530 else
9531 offset = 0;
9532
9533 regcache->raw_write (double_regnum, buf + offset);
9534 offset = 8 - offset;
9535 regcache->raw_write (double_regnum + 1, buf + offset);
9536 }
9537
9538 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9539
9540 static void
9541 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9542 int regnum, const gdb_byte *buf)
9543 {
9544 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9545
9546 /* P0 is the first 16 bits of VPR. */
9547 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9548 }
9549
9550 static void
9551 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9552 int regnum, const gdb_byte *buf)
9553 {
9554 const int num_regs = gdbarch_num_regs (gdbarch);
9555 char name_buf[4];
9556 gdb_byte reg_buf[8];
9557 int offset, double_regnum;
9558 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9559
9560 gdb_assert (regnum >= num_regs);
9561
9562 if (is_q_pseudo (gdbarch, regnum))
9563 {
9564 /* Quad-precision register. */
9565 arm_neon_quad_write (gdbarch, regcache,
9566 regnum - tdep->q_pseudo_base, buf);
9567 }
9568 else if (is_mve_pseudo (gdbarch, regnum))
9569 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9570 else
9571 {
9572 regnum -= tdep->s_pseudo_base;
9573 /* Single-precision register. */
9574 gdb_assert (regnum < 32);
9575
9576 /* s0 is always the least significant half of d0. */
9577 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9578 offset = (regnum & 1) ? 0 : 4;
9579 else
9580 offset = (regnum & 1) ? 4 : 0;
9581
9582 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9583 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9584 strlen (name_buf));
9585
9586 regcache->raw_read (double_regnum, reg_buf);
9587 memcpy (reg_buf + offset, buf, 4);
9588 regcache->raw_write (double_regnum, reg_buf);
9589 }
9590 }
9591
9592 static struct value *
9593 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9594 {
9595 const int *reg_p = (const int *) baton;
9596 return value_of_register (*reg_p, frame);
9597 }
9598 \f
9599 static enum gdb_osabi
9600 arm_elf_osabi_sniffer (bfd *abfd)
9601 {
9602 unsigned int elfosabi;
9603 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9604
9605 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9606
9607 if (elfosabi == ELFOSABI_ARM)
9608 /* GNU tools use this value. Check note sections in this case,
9609 as well. */
9610 {
9611 for (asection *sect : gdb_bfd_sections (abfd))
9612 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9613 }
9614
9615 /* Anything else will be handled by the generic ELF sniffer. */
9616 return osabi;
9617 }
9618
9619 static int
9620 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9621 const struct reggroup *group)
9622 {
9623   /* The FPS register's type is INT, but it belongs to float_reggroup.  Besides
9624      this, the FPS register belongs to save_reggroup, restore_reggroup, and
9625      all_reggroup, of course.  */
9626 if (regnum == ARM_FPS_REGNUM)
9627 return (group == float_reggroup
9628 || group == save_reggroup
9629 || group == restore_reggroup
9630 || group == all_reggroup);
9631 else
9632 return default_register_reggroup_p (gdbarch, regnum, group);
9633 }
9634
9635 /* For backward-compatibility we allow two 'g' packet lengths with
9636 the remote protocol depending on whether FPA registers are
9637    supplied.  M-profile targets do not have FPA registers, but some
9638    stubs already exist in the wild that use a 'g' packet supplying
9639    them, albeit with dummy values.  The packet format which
9640 includes FPA registers should be considered deprecated for
9641 M-profile targets. */
9642
9643 static void
9644 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9645 {
9646 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9647
9648 if (tdep->is_m)
9649 {
9650 const target_desc *tdesc;
9651
9652 /* If we know from the executable this is an M-profile target,
9653 cater for remote targets whose register set layout is the
9654 same as the FPA layout. */
9655 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9656 register_remote_g_packet_guess (gdbarch,
9657 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9658 tdesc);
9659
9660 /* The regular M-profile layout. */
9661 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9662 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9663 tdesc);
9664
9665 /* M-profile plus M4F VFP. */
9666 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9667 register_remote_g_packet_guess (gdbarch,
9668 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9669 tdesc);
9670 /* M-profile plus MVE. */
9671 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9672 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9673 + ARM_VFP2_REGS_SIZE
9674 + ARM_INT_REGISTER_SIZE, tdesc);
9675
9676 /* M-profile system (stack pointers). */
9677 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
9678 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
9679 }
9680
9681 /* Otherwise we don't have a useful guess. */
9682 }
9683
9684 /* Implement the code_of_frame_writable gdbarch method. */
9685
9686 static int
9687 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
9688 {
9689 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9690
9691 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9692 {
9693       /* M-profile exception frames return to some magic PCs, which
9694 	 aren't writable at all.  */
9695 return 0;
9696 }
9697 else
9698 return 1;
9699 }
9700
9701 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
9702    to be postfixed by a version (e.g. armv7hl).  */
9703
9704 static const char *
9705 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9706 {
9707 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9708 return "arm(v[^- ]*)?";
9709 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9710 }
9711
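/* Editor's note: an illustrative sketch only, not part of GDB.  It shows
   what the regexp returned by arm_gnu_triplet_regexp above accepts,
   assuming the caller anchors it against the architecture component of a
   GNU triplet (e.g. the "armv7hl" in "armv7hl-redhat-linux-gnueabi").
   The example_* name is hypothetical.  */
#if 0
#include <regex>
#include <string>

static bool
example_matches_arm_arch (const std::string &arch)
{
  static const std::regex re ("arm(v[^- ]*)?");

  /* std::regex_match requires the whole string to match, i.e. the
     expression is effectively anchored: "arm" and "armv7hl" match,
     "aarch64" does not.  */
  return std::regex_match (arch, re);
}
#endif
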
9712 /* Implement the "get_pc_address_flags" gdbarch method. */
9713
9714 static std::string
9715 arm_get_pc_address_flags (frame_info *frame, CORE_ADDR pc)
9716 {
9717 if (get_frame_pc_masked (frame))
9718 return "PAC";
9719
9720 return "";
9721 }
9722
9723 /* Initialize the current architecture based on INFO. If possible,
9724 re-use an architecture from ARCHES, which is a list of
9725 architectures already created during this debugging session.
9726
9727 Called e.g. at program startup, when reading a core file, and when
9728 reading a binary file. */
9729
9730 static struct gdbarch *
9731 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9732 {
9733 struct gdbarch *gdbarch;
9734 struct gdbarch_list *best_arch;
9735 enum arm_abi_kind arm_abi = arm_abi_global;
9736 enum arm_float_model fp_model = arm_fp_model;
9737 tdesc_arch_data_up tdesc_data;
9738 int i;
9739 bool is_m = false;
9740 bool have_sec_ext = false;
9741 int vfp_register_count = 0;
9742 bool have_s_pseudos = false, have_q_pseudos = false;
9743 bool have_wmmx_registers = false;
9744 bool have_neon = false;
9745 bool have_fpa_registers = true;
9746 const struct target_desc *tdesc = info.target_desc;
9747 bool have_vfp = false;
9748 bool have_mve = false;
9749 bool have_pacbti = false;
9750 int mve_vpr_regnum = -1;
9751 int register_count = ARM_NUM_REGS;
9752 bool have_m_profile_msp = false;
9753 int m_profile_msp_regnum = -1;
9754 int m_profile_psp_regnum = -1;
9755 int m_profile_msp_ns_regnum = -1;
9756 int m_profile_psp_ns_regnum = -1;
9757 int m_profile_msp_s_regnum = -1;
9758 int m_profile_psp_s_regnum = -1;
9759 int tls_regnum = 0;
9760
9761 /* If we have an object to base this architecture on, try to determine
9762 its ABI. */
9763
9764 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9765 {
9766 int ei_osabi, e_flags;
9767
9768 switch (bfd_get_flavour (info.abfd))
9769 {
9770 case bfd_target_coff_flavour:
9771 /* Assume it's an old APCS-style ABI. */
9772 /* XXX WinCE? */
9773 arm_abi = ARM_ABI_APCS;
9774 break;
9775
9776 case bfd_target_elf_flavour:
9777 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9778 e_flags = elf_elfheader (info.abfd)->e_flags;
9779
9780 if (ei_osabi == ELFOSABI_ARM)
9781 {
9782 /* GNU tools used to use this value, but do not for EABI
9783 objects. There's nowhere to tag an EABI version
9784 anyway, so assume APCS. */
9785 arm_abi = ARM_ABI_APCS;
9786 }
9787 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9788 {
9789 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9790
9791 switch (eabi_ver)
9792 {
9793 case EF_ARM_EABI_UNKNOWN:
9794 /* Assume GNU tools. */
9795 arm_abi = ARM_ABI_APCS;
9796 break;
9797
9798 case EF_ARM_EABI_VER4:
9799 case EF_ARM_EABI_VER5:
9800 arm_abi = ARM_ABI_AAPCS;
9801 /* EABI binaries default to VFP float ordering.
9802 They may also contain build attributes that can
9803 be used to identify if the VFP argument-passing
9804 ABI is in use. */
9805 if (fp_model == ARM_FLOAT_AUTO)
9806 {
9807 #ifdef HAVE_ELF
9808 switch (bfd_elf_get_obj_attr_int (info.abfd,
9809 OBJ_ATTR_PROC,
9810 Tag_ABI_VFP_args))
9811 {
9812 case AEABI_VFP_args_base:
9813 /* "The user intended FP parameter/result
9814 passing to conform to AAPCS, base
9815 variant". */
9816 fp_model = ARM_FLOAT_SOFT_VFP;
9817 break;
9818 case AEABI_VFP_args_vfp:
9819 /* "The user intended FP parameter/result
9820 passing to conform to AAPCS, VFP
9821 variant". */
9822 fp_model = ARM_FLOAT_VFP;
9823 break;
9824 case AEABI_VFP_args_toolchain:
9825 /* "The user intended FP parameter/result
9826 passing to conform to tool chain-specific
9827 conventions" - we don't know any such
9828 conventions, so leave it as "auto". */
9829 break;
9830 case AEABI_VFP_args_compatible:
9831 /* "Code is compatible with both the base
9832 and VFP variants; the user did not permit
9833 non-variadic functions to pass FP
9834 parameters/results" - leave it as
9835 "auto". */
9836 break;
9837 default:
9838 /* Attribute value not mentioned in the
9839 November 2012 ABI, so leave it as
9840 "auto". */
9841 break;
9842 }
9843 #else
9844 fp_model = ARM_FLOAT_SOFT_VFP;
9845 #endif
9846 }
9847 break;
9848
9849 default:
9850 /* Leave it as "auto". */
9851 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9852 break;
9853 }
9854
9855 #ifdef HAVE_ELF
9856 /* Detect M-profile programs. This only works if the
9857 executable file includes build attributes; GCC does
9858 copy them to the executable, but e.g. RealView does
9859 not. */
9860 int attr_arch
9861 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9862 Tag_CPU_arch);
9863 int attr_profile
9864 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9865 Tag_CPU_arch_profile);
9866
9867 /* GCC specifies the profile for v6-M; RealView only
9868 specifies the profile for architectures starting with
9869 V7 (as opposed to architectures with a tag
9870 numerically greater than TAG_CPU_ARCH_V7). */
9871 if (!tdesc_has_registers (tdesc)
9872 && (attr_arch == TAG_CPU_ARCH_V6_M
9873 || attr_arch == TAG_CPU_ARCH_V6S_M
9874 || attr_arch == TAG_CPU_ARCH_V7E_M
9875 || attr_arch == TAG_CPU_ARCH_V8M_BASE
9876 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
9877 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
9878 || attr_profile == 'M'))
9879 is_m = true;
9880
9881 /* Look for attributes that indicate support for ARMv8.1-m
9882 PACBTI. */
9883 if (!tdesc_has_registers (tdesc) && is_m)
9884 {
9885 int attr_pac_extension
9886 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9887 Tag_PAC_extension);
9888
9889 int attr_bti_extension
9890 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9891 Tag_BTI_extension);
9892
9893 int attr_pacret_use
9894 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9895 Tag_PACRET_use);
9896
9897 int attr_bti_use
9898 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9899 Tag_BTI_use);
9900
9901 if (attr_pac_extension != 0 || attr_bti_extension != 0
9902 || attr_pacret_use != 0 || attr_bti_use != 0)
9903 have_pacbti = true;
9904 }
9905 #endif
9906 }
9907
9908 if (fp_model == ARM_FLOAT_AUTO)
9909 {
9910 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9911 {
9912 case 0:
9913 /* Leave it as "auto". Strictly speaking this case
9914 means FPA, but almost nobody uses that now, and
9915 many toolchains fail to set the appropriate bits
9916 for the floating-point model they use. */
9917 break;
9918 case EF_ARM_SOFT_FLOAT:
9919 fp_model = ARM_FLOAT_SOFT_FPA;
9920 break;
9921 case EF_ARM_VFP_FLOAT:
9922 fp_model = ARM_FLOAT_VFP;
9923 break;
9924 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9925 fp_model = ARM_FLOAT_SOFT_VFP;
9926 break;
9927 }
9928 }
9929
9930 if (e_flags & EF_ARM_BE8)
9931 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9932
9933 break;
9934
9935 default:
9936 /* Leave it as "auto". */
9937 break;
9938 }
9939 }
9940
9941 /* Check any target description for validity. */
9942 if (tdesc_has_registers (tdesc))
9943 {
9944 /* For most registers we require GDB's default names; but also allow
9945 the numeric names for sp / lr / pc, as a convenience. */
9946 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9947 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9948 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9949
9950 const struct tdesc_feature *feature;
9951 int valid_p;
9952
9953 feature = tdesc_find_feature (tdesc,
9954 "org.gnu.gdb.arm.core");
9955 if (feature == NULL)
9956 {
9957 feature = tdesc_find_feature (tdesc,
9958 "org.gnu.gdb.arm.m-profile");
9959 if (feature == NULL)
9960 return NULL;
9961 else
9962 is_m = true;
9963 }
9964
9965 tdesc_data = tdesc_data_alloc ();
9966
9967 valid_p = 1;
9968 for (i = 0; i < ARM_SP_REGNUM; i++)
9969 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9970 arm_register_names[i]);
9971 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9972 ARM_SP_REGNUM,
9973 arm_sp_names);
9974 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9975 ARM_LR_REGNUM,
9976 arm_lr_names);
9977 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9978 ARM_PC_REGNUM,
9979 arm_pc_names);
9980 if (is_m)
9981 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9982 ARM_PS_REGNUM, "xpsr");
9983 else
9984 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9985 ARM_PS_REGNUM, "cpsr");
9986
9987 if (!valid_p)
9988 return NULL;
9989
9990 if (is_m)
9991 {
9992 feature = tdesc_find_feature (tdesc,
9993 "org.gnu.gdb.arm.m-system");
9994 if (feature != nullptr)
9995 {
9996 /* MSP */
9997 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9998 register_count, "msp");
9999 if (!valid_p)
10000 {
10001 warning (_("M-profile m-system feature is missing required register msp."));
10002 return nullptr;
10003 }
10004 have_m_profile_msp = true;
10005 m_profile_msp_regnum = register_count++;
10006
10007 /* PSP */
10008 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10009 register_count, "psp");
10010 if (!valid_p)
10011 {
10012 warning (_("M-profile m-system feature is missing required register psp."));
10013 return nullptr;
10014 }
10015 m_profile_psp_regnum = register_count++;
10016 }
10017 }
10018
10019 feature = tdesc_find_feature (tdesc,
10020 "org.gnu.gdb.arm.fpa");
10021 if (feature != NULL)
10022 {
10023 valid_p = 1;
10024 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10025 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10026 arm_register_names[i]);
10027 if (!valid_p)
10028 return NULL;
10029 }
10030 else
10031 have_fpa_registers = false;
10032
10033 feature = tdesc_find_feature (tdesc,
10034 "org.gnu.gdb.xscale.iwmmxt");
10035 if (feature != NULL)
10036 {
10037 static const char *const iwmmxt_names[] = {
10038 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10039 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10040 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10041 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10042 };
10043
10044 valid_p = 1;
10045 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10046 valid_p
10047 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10048 iwmmxt_names[i - ARM_WR0_REGNUM]);
10049
10050 /* Check for the control registers, but do not fail if they
10051 are missing. */
10052 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10053 tdesc_numbered_register (feature, tdesc_data.get (), i,
10054 iwmmxt_names[i - ARM_WR0_REGNUM]);
10055
10056 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10057 valid_p
10058 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10059 iwmmxt_names[i - ARM_WR0_REGNUM]);
10060
10061 if (!valid_p)
10062 return NULL;
10063
10064 have_wmmx_registers = true;
10065 }
10066
10067 /* If we have a VFP unit, check whether the single precision registers
10068 are present. If not, then we will synthesize them as pseudo
10069 registers. */
10070 feature = tdesc_find_feature (tdesc,
10071 "org.gnu.gdb.arm.vfp");
10072 if (feature != NULL)
10073 {
10074 static const char *const vfp_double_names[] = {
10075 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10076 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10077 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10078 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10079 };
10080
10081 /* Require the double precision registers. There must be either
10082 16 or 32. */
10083 valid_p = 1;
10084 for (i = 0; i < 32; i++)
10085 {
10086 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10087 ARM_D0_REGNUM + i,
10088 vfp_double_names[i]);
10089 if (!valid_p)
10090 break;
10091 }
10092 if (!valid_p && i == 16)
10093 valid_p = 1;
10094
10095 /* Also require FPSCR. */
10096 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10097 ARM_FPSCR_REGNUM, "fpscr");
10098 if (!valid_p)
10099 return NULL;
10100
10101 have_vfp = true;
10102
10103 if (tdesc_unnumbered_register (feature, "s0") == 0)
10104 have_s_pseudos = true;
10105
10106 vfp_register_count = i;
10107
10108 /* If we have VFP, also check for NEON. The architecture allows
10109 NEON without VFP (integer vector operations only), but GDB
10110 does not support that. */
10111 feature = tdesc_find_feature (tdesc,
10112 "org.gnu.gdb.arm.neon");
10113 if (feature != NULL)
10114 {
10115 /* NEON requires 32 double-precision registers. */
10116 if (i != 32)
10117 return NULL;
10118
10119 /* If there are quad registers defined by the stub, use
10120 their type; otherwise (normally) provide them with
10121 the default type. */
10122 if (tdesc_unnumbered_register (feature, "q0") == 0)
10123 have_q_pseudos = true;
10124 }
10125 }
10126
10127 /* Check for the TLS register feature. */
10128 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10129 if (feature != nullptr)
10130 {
10131 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10132 register_count, "tpidruro");
10133 if (!valid_p)
10134 return nullptr;
10135
10136 tls_regnum = register_count;
10137 register_count++;
10138 }
10139
10140       /* Check for MVE after all the checks for GPRs, VFP and NEON.
10141 MVE (Helium) is an M-profile extension. */
10142 if (is_m)
10143 {
10144 /* Do we have the MVE feature? */
10145 	  feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.m-profile-mve");
10146
10147 if (feature != nullptr)
10148 {
10149 /* If we have MVE, we must always have the VPR register. */
10150 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10151 register_count, "vpr");
10152 if (!valid_p)
10153 {
10154 warning (_("MVE feature is missing required register vpr."));
10155 return nullptr;
10156 }
10157
10158 have_mve = true;
10159 mve_vpr_regnum = register_count;
10160 register_count++;
10161
10162 /* We can't have Q pseudo registers available here, as that
10163 would mean we have NEON features, and that is only available
10164 on A and R profiles. */
10165 gdb_assert (!have_q_pseudos);
10166
10167 	      /* Given we have an M-profile target description, if MVE is
10168 enabled and there are VFP registers, we should have Q
10169 pseudo registers (Q0 ~ Q7). */
10170 if (have_vfp)
10171 have_q_pseudos = true;
10172 }
10173
10174 /* Do we have the ARMv8.1-m PACBTI feature? */
10175 feature = tdesc_find_feature (tdesc,
10176 "org.gnu.gdb.arm.m-profile-pacbti");
10177 if (feature != nullptr)
10178 {
10179 /* By advertising this feature, the target acknowledges the
10180 presence of the ARMv8.1-m PACBTI extensions.
10181
10182 We don't care for any particular registers in this group, so
10183 the target is free to include whatever it deems appropriate.
10184
10185 The expectation is for this feature to include the PAC
10186 keys. */
10187 have_pacbti = true;
10188 }
10189
10190 /* Do we have the Security extension? */
10191 feature = tdesc_find_feature (tdesc,
10192 "org.gnu.gdb.arm.secext");
10193 if (feature != nullptr)
10194 {
10195 /* Secure/Non-secure stack pointers. */
10196 /* MSP_NS */
10197 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10198 register_count, "msp_ns");
10199 if (!valid_p)
10200 {
10201 warning (_("M-profile secext feature is missing required register msp_ns."));
10202 return nullptr;
10203 }
10204 m_profile_msp_ns_regnum = register_count++;
10205
10206 /* PSP_NS */
10207 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10208 register_count, "psp_ns");
10209 if (!valid_p)
10210 {
10211 warning (_("M-profile secext feature is missing required register psp_ns."));
10212 return nullptr;
10213 }
10214 m_profile_psp_ns_regnum = register_count++;
10215
10216 /* MSP_S */
10217 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10218 register_count, "msp_s");
10219 if (!valid_p)
10220 {
10221 warning (_("M-profile secext feature is missing required register msp_s."));
10222 return nullptr;
10223 }
10224 m_profile_msp_s_regnum = register_count++;
10225
10226 /* PSP_S */
10227 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10228 register_count, "psp_s");
10229 if (!valid_p)
10230 {
10231 warning (_("M-profile secext feature is missing required register psp_s."));
10232 return nullptr;
10233 }
10234 m_profile_psp_s_regnum = register_count++;
10235
10236 have_sec_ext = true;
10237 }
10238
10239 }
10240 }
10241
10242 /* If there is already a candidate, use it. */
10243 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10244 best_arch != NULL;
10245 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10246 {
10247 arm_gdbarch_tdep *tdep
10248 = (arm_gdbarch_tdep *) gdbarch_tdep (best_arch->gdbarch);
10249
10250 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10251 continue;
10252
10253 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10254 continue;
10255
10256 /* There are various other properties in tdep that we do not
10257 need to check here: those derived from a target description,
10258 since gdbarches with a different target description are
10259 automatically disqualified. */
10260
10261 /* Do check is_m, though, since it might come from the binary. */
10262 if (is_m != tdep->is_m)
10263 continue;
10264
10265 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10266 the binary. */
10267 if (have_pacbti != tdep->have_pacbti)
10268 continue;
10269
10270 /* Found a match. */
10271 break;
10272 }
10273
10274 if (best_arch != NULL)
10275 return best_arch->gdbarch;
10276
10277 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
10278 gdbarch = gdbarch_alloc (&info, tdep);
10279
10280 /* Record additional information about the architecture we are defining.
10281 These are gdbarch discriminators, like the OSABI. */
10282 tdep->arm_abi = arm_abi;
10283 tdep->fp_model = fp_model;
10284 tdep->is_m = is_m;
10285 tdep->have_sec_ext = have_sec_ext;
10286 tdep->have_fpa_registers = have_fpa_registers;
10287 tdep->have_wmmx_registers = have_wmmx_registers;
10288 gdb_assert (vfp_register_count == 0
10289 || vfp_register_count == 16
10290 || vfp_register_count == 32);
10291 tdep->vfp_register_count = vfp_register_count;
10292 tdep->have_s_pseudos = have_s_pseudos;
10293 tdep->have_q_pseudos = have_q_pseudos;
10294 tdep->have_neon = have_neon;
10295 tdep->tls_regnum = tls_regnum;
10296
10297 /* Adjust the MVE feature settings. */
10298 if (have_mve)
10299 {
10300 tdep->have_mve = true;
10301 tdep->mve_vpr_regnum = mve_vpr_regnum;
10302 }
10303
10304 /* Adjust the PACBTI feature settings. */
10305 tdep->have_pacbti = have_pacbti;
10306
10307 /* Adjust the M-profile stack pointers settings. */
10308 if (have_m_profile_msp)
10309 {
10310 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10311 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10312 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10313 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10314 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10315 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10316 }
10317
10318 arm_register_g_packet_guesses (gdbarch);
10319
10320 /* Breakpoints. */
10321 switch (info.byte_order_for_code)
10322 {
10323 case BFD_ENDIAN_BIG:
10324 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10325 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10326 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10327 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10328
10329 break;
10330
10331 case BFD_ENDIAN_LITTLE:
10332 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10333 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10334 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10335 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10336
10337 break;
10338
10339 default:
10340 internal_error (__FILE__, __LINE__,
10341 _("arm_gdbarch_init: bad byte order for float format"));
10342 }
10343
10344 /* On ARM targets char defaults to unsigned. */
10345 set_gdbarch_char_signed (gdbarch, 0);
10346
10347 /* wchar_t is unsigned under the AAPCS. */
10348 if (tdep->arm_abi == ARM_ABI_AAPCS)
10349 set_gdbarch_wchar_signed (gdbarch, 0);
10350 else
10351 set_gdbarch_wchar_signed (gdbarch, 1);
10352
10353 /* Compute type alignment. */
10354 set_gdbarch_type_align (gdbarch, arm_type_align);
10355
10356 /* Note: for displaced stepping, this includes the breakpoint, and one word
10357      of additional scratch space.  This setting isn't used for anything besides
10358 displaced stepping at present. */
10359 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10360
10361 /* This should be low enough for everything. */
10362 tdep->lowest_pc = 0x20;
10363 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10364
10365 /* The default, for both APCS and AAPCS, is to return small
10366 structures in registers. */
10367 tdep->struct_return = reg_struct_return;
10368
10369 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10370 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10371
10372 if (is_m)
10373 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10374
10375 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10376
10377 frame_base_set_default (gdbarch, &arm_normal_base);
10378
10379 /* Address manipulation. */
10380 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10381
10382 /* Advance PC across function entry code. */
10383 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10384
10385 /* Detect whether PC is at a point where the stack has been destroyed. */
10386 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10387
10388 /* Skip trampolines. */
10389 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10390
10391 /* The stack grows downward. */
10392 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10393
10394 /* Breakpoint manipulation. */
10395 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10396 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10397 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10398 arm_breakpoint_kind_from_current_state);
10399
10400 /* Information about registers, etc. */
10401 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10402 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10403 set_gdbarch_num_regs (gdbarch, register_count);
10404 set_gdbarch_register_type (gdbarch, arm_register_type);
10405 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10406
10407 /* This "info float" is FPA-specific. Use the generic version if we
10408 do not have FPA. */
10409 if (tdep->have_fpa_registers)
10410 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10411
10412 /* Internal <-> external register number maps. */
10413 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10414 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10415
10416 set_gdbarch_register_name (gdbarch, arm_register_name);
10417
10418 /* Returning results. */
10419 set_gdbarch_return_value (gdbarch, arm_return_value);
10420
10421 /* Disassembly. */
10422 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10423
10424 /* Minsymbol frobbing. */
10425 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10426 set_gdbarch_coff_make_msymbol_special (gdbarch,
10427 arm_coff_make_msymbol_special);
10428 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10429
10430 /* Thumb-2 IT block support. */
10431 set_gdbarch_adjust_breakpoint_address (gdbarch,
10432 arm_adjust_breakpoint_address);
10433
10434 /* Virtual tables. */
10435 set_gdbarch_vbit_in_delta (gdbarch, 1);
10436
10437 /* Hook in the ABI-specific overrides, if they have been registered. */
10438 gdbarch_init_osabi (info, gdbarch);
10439
10440 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10441
10442 /* Add some default predicates. */
10443 if (is_m)
10444 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10445 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10446 dwarf2_append_unwinders (gdbarch);
10447 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10448 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10449 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10450
10451 /* Now we have tuned the configuration, set a few final things,
10452 based on what the OS ABI has told us. */
10453
10454 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10455 binaries are always marked. */
10456 if (tdep->arm_abi == ARM_ABI_AUTO)
10457 tdep->arm_abi = ARM_ABI_APCS;
10458
10459 /* Watchpoints are not steppable. */
10460 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10461
10462 /* We used to default to FPA for generic ARM, but almost nobody
10463 uses that now, and we now provide a way for the user to force
10464 the model. So default to the most useful variant. */
10465 if (tdep->fp_model == ARM_FLOAT_AUTO)
10466 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10467
10468 if (tdep->jb_pc >= 0)
10469 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10470
10471 /* Floating point sizes and format. */
10472 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10473 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10474 {
10475 set_gdbarch_double_format
10476 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10477 set_gdbarch_long_double_format
10478 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10479 }
10480 else
10481 {
10482 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10483 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10484 }
10485
10486 /* Hook used to decorate frames with signed return addresses, only available
10487 for ARMv8.1-m PACBTI. */
10488 if (is_m && have_pacbti)
10489 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10490
10491 if (tdesc_data != nullptr)
10492 {
10493 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10494
10495 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10496 register_count = gdbarch_num_regs (gdbarch);
10497
10498 /* Override tdesc_register_type to adjust the types of VFP
10499 registers for NEON. */
10500 set_gdbarch_register_type (gdbarch, arm_register_type);
10501 }
10502
10503 /* Initialize the pseudo register data. */
10504 int num_pseudos = 0;
10505 if (tdep->have_s_pseudos)
10506 {
10507 /* VFP single precision pseudo registers (S0~S31). */
10508 tdep->s_pseudo_base = register_count;
10509 tdep->s_pseudo_count = 32;
10510 num_pseudos += tdep->s_pseudo_count;
10511
10512 if (tdep->have_q_pseudos)
10513 {
10514 /* NEON quad precision pseudo registers (Q0~Q15). */
10515 tdep->q_pseudo_base = register_count + num_pseudos;
10516
10517 if (have_neon)
10518 tdep->q_pseudo_count = 16;
10519 else if (have_mve)
10520 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10521
10522 num_pseudos += tdep->q_pseudo_count;
10523 }
10524 }
10525
10526 /* Do we have any MVE pseudo registers? */
10527 if (have_mve)
10528 {
10529 tdep->mve_pseudo_base = register_count + num_pseudos;
10530 tdep->mve_pseudo_count = 1;
10531 num_pseudos += tdep->mve_pseudo_count;
10532 }
10533
10534   /* Do we have any ARMv8.1-m PACBTI pseudo registers?  */
10535 if (have_pacbti)
10536 {
10537 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10538 tdep->pacbti_pseudo_count = 1;
10539 num_pseudos += tdep->pacbti_pseudo_count;
10540 }
10541
10542 /* Set some pseudo register hooks, if we have pseudo registers. */
10543 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10544 {
10545 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10546 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10547 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10548 }
10549
10550 /* Add standard register aliases. We add aliases even for those
10551 names which are used by the current architecture - it's simpler,
10552 and does no harm, since nothing ever lists user registers. */
10553 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10554 user_reg_add (gdbarch, arm_register_aliases[i].name,
10555 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10556
10557 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10558 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10559
10560 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10561
10562 return gdbarch;
10563 }
10564
10565 static void
10566 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10567 {
10568 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
10569
10570 if (tdep == NULL)
10571 return;
10572
10573 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10574 (int) tdep->fp_model);
10575 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10576 (int) tdep->have_fpa_registers);
10577 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10578 (int) tdep->have_wmmx_registers);
10579 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10580 (int) tdep->vfp_register_count);
10581 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10582 tdep->have_s_pseudos? "true" : "false");
10583 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10584 (int) tdep->s_pseudo_base);
10585 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10586 (int) tdep->s_pseudo_count);
10587 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10588 tdep->have_q_pseudos? "true" : "false");
10589 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10590 (int) tdep->q_pseudo_base);
10591 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10592 (int) tdep->q_pseudo_count);
10593 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10594 (int) tdep->have_neon);
10595 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10596 tdep->have_mve? "yes" : "no");
10597 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10598 tdep->mve_vpr_regnum);
10599 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10600 tdep->mve_pseudo_base);
10601 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10602 tdep->mve_pseudo_count);
10603 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10604 tdep->m_profile_msp_regnum);
10605 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10606 tdep->m_profile_psp_regnum);
10607 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10608 tdep->m_profile_msp_ns_regnum);
10609 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10610 tdep->m_profile_psp_ns_regnum);
10611 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10612 tdep->m_profile_msp_s_regnum);
10613 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10614 tdep->m_profile_psp_s_regnum);
10615 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10616 (unsigned long) tdep->lowest_pc);
10617 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10618 tdep->have_pacbti ? "yes" : "no");
10619 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10620 tdep->pacbti_pseudo_base);
10621 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10622 tdep->pacbti_pseudo_count);
10623 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10624 tdep->is_m ? "yes" : "no");
10625 }
10626
10627 #if GDB_SELF_TEST
10628 namespace selftests
10629 {
10630 static void arm_record_test (void);
10631 static void arm_analyze_prologue_test ();
10632 }
10633 #endif
10634
10635 void _initialize_arm_tdep ();
10636 void
10637 _initialize_arm_tdep ()
10638 {
10639 long length;
10640 int i, j;
10641 char regdesc[1024], *rdptr = regdesc;
10642 size_t rest = sizeof (regdesc);
10643
10644 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10645
10646 /* Add ourselves to objfile event chain. */
10647 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10648
10649 /* Register an ELF OS ABI sniffer for ARM binaries. */
10650 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10651 bfd_target_elf_flavour,
10652 arm_elf_osabi_sniffer);
10653
10654 /* Add root prefix command for all "set arm"/"show arm" commands. */
10655 add_setshow_prefix_cmd ("arm", no_class,
10656 _("Various ARM-specific commands."),
10657 _("Various ARM-specific commands."),
10658 &setarmcmdlist, &showarmcmdlist,
10659 &setlist, &showlist);
10660
10661 arm_disassembler_options = xstrdup ("reg-names-std");
10662 const disasm_options_t *disasm_options
10663 = &disassembler_options_arm ()->options;
10664 int num_disassembly_styles = 0;
10665 for (i = 0; disasm_options->name[i] != NULL; i++)
10666 if (startswith (disasm_options->name[i], "reg-names-"))
10667 num_disassembly_styles++;
10668
10669 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
10670 valid_disassembly_styles = XNEWVEC (const char *,
10671 num_disassembly_styles + 1);
10672 for (i = j = 0; disasm_options->name[i] != NULL; i++)
10673 if (startswith (disasm_options->name[i], "reg-names-"))
10674 {
10675 size_t offset = strlen ("reg-names-");
10676 const char *style = disasm_options->name[i];
10677 valid_disassembly_styles[j++] = &style[offset];
10678 if (strcmp (&style[offset], "std") == 0)
10679 disassembly_style = &style[offset];
10680 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
10681 disasm_options->description[i]);
10682 rdptr += length;
10683 rest -= length;
10684 }
10685 /* Mark the end of valid options. */
10686 valid_disassembly_styles[num_disassembly_styles] = NULL;
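/* The trailing NULL matters: add_setshow_enum_cmd expects the enum
   list to be a NULL-terminated array of strings.  */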
10687
10688 /* Create the help text. */
10689 std::string helptext = string_printf ("%s%s%s",
10690 _("The valid values are:\n"),
10691 regdesc,
10692 _("The default is \"std\"."));
10693
10694 add_setshow_enum_cmd ("disassembler", no_class,
10695 valid_disassembly_styles, &disassembly_style,
10696 _("Set the disassembly style."),
10697 _("Show the disassembly style."),
10698 helptext.c_str (),
10699 set_disassembly_style_sfunc,
10700 show_disassembly_style_sfunc,
10701 &setarmcmdlist, &showarmcmdlist);
10702
10703 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10704 _("Set usage of ARM 32-bit mode."),
10705 _("Show usage of ARM 32-bit mode."),
10706 _("When off, a 26-bit PC will be used."),
10707 NULL,
10708 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10709 mode is %s. */
10710 &setarmcmdlist, &showarmcmdlist);
10711
10712 /* Add a command to allow the user to force the FPU model. */
10713 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10714 _("Set the floating point type."),
10715 _("Show the floating point type."),
10716 _("auto - Determine the FP type from the OS-ABI.\n\
10717 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10718 fpa - FPA co-processor (GCC compiled).\n\
10719 softvfp - Software FP with pure-endian doubles.\n\
10720 vfp - VFP co-processor."),
10721 set_fp_model_sfunc, show_fp_model,
10722 &setarmcmdlist, &showarmcmdlist);
10723
10724 /* Add a command to allow the user to force the ABI. */
10725 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10726 _("Set the ABI."),
10727 _("Show the ABI."),
10728 NULL, arm_set_abi, arm_show_abi,
10729 &setarmcmdlist, &showarmcmdlist);
10730
10731 /* Add two commands to allow the user to force the assumed
10732 execution mode. */
10733 add_setshow_enum_cmd ("fallback-mode", class_support,
10734 arm_mode_strings, &arm_fallback_mode_string,
10735 _("Set the mode assumed when symbols are unavailable."),
10736 _("Show the mode assumed when symbols are unavailable."),
10737 NULL, NULL, arm_show_fallback_mode,
10738 &setarmcmdlist, &showarmcmdlist);
10739 add_setshow_enum_cmd ("force-mode", class_support,
10740 arm_mode_strings, &arm_force_mode_string,
10741 _("Set the mode assumed even when symbols are available."),
10742 _("Show the mode assumed even when symbols are available."),
10743 NULL, NULL, arm_show_force_mode,
10744 &setarmcmdlist, &showarmcmdlist);
10745
10746 /* Add a command to stop triggering security exceptions when
10747 unwinding exception stacks. */
10748 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
10749 _("Set usage of non-secure to secure exception stack unwinding."),
10750 _("Show usage of non-secure to secure exception stack unwinding."),
10751 _("When on, the debugger can trigger memory access traps."),
10752 NULL, arm_show_unwind_secure_frames,
10753 &setarmcmdlist, &showarmcmdlist);
10754
10755 /* Debugging flag. */
10756 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10757 _("Set ARM debugging."),
10758 _("Show ARM debugging."),
10759 _("When on, arm-specific debugging is enabled."),
10760 NULL,
10761 NULL, /* FIXME: i18n: ARM debugging is %s.  */
10762 &setdebuglist, &showdebuglist);
10763
10764 #if GDB_SELF_TEST
10765 selftests::register_test ("arm-record", selftests::arm_record_test);
10766 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
10767 #endif
10768
10769 }
10770
10771 /* ARM-reversible process record data structures. */
10772
10773 #define ARM_INSN_SIZE_BYTES 4
10774 #define THUMB_INSN_SIZE_BYTES 2
10775 #define THUMB2_INSN_SIZE_BYTES 4
10776
10777
10778 /* Position of the bit within a 32-bit ARM instruction
10779 that defines whether the instruction is a load or store. */
10780 #define INSN_S_L_BIT_NUM 20
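/* For example, bit (insn, INSN_S_L_BIT_NUM) is 1 for a load (LDR, LDM)
   and 0 for a store (STR, STM).  */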
10781
10782 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10783 do \
10784 { \
10785 unsigned int reg_len = LENGTH; \
10786 if (reg_len) \
10787 { \
10788 REGS = XNEWVEC (uint32_t, reg_len); \
10789 memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
10790 } \
10791 } \
10792 while (0)
10793
10794 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10795 do \
10796 { \
10797 unsigned int mem_len = LENGTH; \
10798 if (mem_len) \
10799 { \
10800 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10801 memcpy (&MEMS->len, &RECORD_BUF[0], \
10802 sizeof (struct arm_mem_r) * LENGTH); \
10803 } \
10804 } \
10805 while (0)
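/* For MEM_ALLOC, RECORD_BUF is expected to hold consecutive
   (length, address) pairs matching struct arm_mem_r below.  A typical
   caller (sketch) sets record_buf_mem[0] to the byte count,
   record_buf_mem[1] to the start address, bumps mem_rec_count, and
   then calls:

     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);  */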
10806
10807 /* Boolean expression: checks whether the insn has already been recorded, i.e. whether it has any register or memory records. */
10808 #define INSN_RECORDED(ARM_RECORD) \
10809 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10810
10811 /* ARM memory record structure. */
10812 struct arm_mem_r
10813 {
10814 uint32_t len; /* Record length. */
10815 uint32_t addr; /* Memory address. */
10816 };
10817
10818 /* An ARM instruction record contains the opcode and execution state
10819 of the current insn (before entry to decode_insn ()) and the list
10820 of registers and memory blocks to be modified
10821 (on return from decode_insn ()). */
10822
10823 struct arm_insn_decode_record
10824 {
10825 struct gdbarch *gdbarch;
10826 struct regcache *regcache;
10827 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10828 uint32_t arm_insn; /* Should accommodate thumb. */
10829 uint32_t cond; /* Condition code. */
10830 uint32_t opcode; /* Insn opcode. */
10831 uint32_t decode; /* Insn decode bits. */
10832 uint32_t mem_rec_count; /* No of mem records. */
10833 uint32_t reg_rec_count; /* No of reg records. */
10834 uint32_t *arm_regs; /* Registers to be saved for this record. */
10835 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10836 };
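/* Typical flow (sketch): a decode routine inspects ARM_INSN, fills a
   local record_buf / record_buf_mem, sets reg_rec_count and
   mem_rec_count, and then publishes the results, e.g.:

     uint32_t record_buf[8];
     record_buf[0] = ARM_PS_REGNUM;
     arm_insn_r->reg_rec_count = 1;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);  */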
10837
10838
10839 /* Checks ARM SBZ and SBO mandatory fields. */
10840
10841 static int
10842 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10843 {
10844 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
10845
10846 if (!len)
10847 return 1;
10848
10849 if (!sbo)
10850 ones = ~ones;
10851
10852 while (ones)
10853 {
10854 if (!(ones & sbo))
10855 {
10856 return 0;
10857 }
10858 ones = ones >> 1;
10859 }
10860 return 1;
10861 }
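/* For example, sbo_sbz (insn, 9, 12, 1) checks that the 12-bit field
   starting at (1-based) bit 9, i.e. insn bits 8..19, is all ones;
   an SBO argument of 0 is meant to check a should-be-zero field.  */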
10862
10863 enum arm_record_result
10864 {
10865 ARM_RECORD_SUCCESS = 0,
10866 ARM_RECORD_FAILURE = 1
10867 };
10868
10869 enum arm_record_strx_t
10870 {
10871 ARM_RECORD_STRH=1,
10872 ARM_RECORD_STRD
10873 };
10874
10875 enum record_type_t
10876 {
10877 ARM_RECORD=1,
10878 THUMB_RECORD,
10879 THUMB2_RECORD
10880 };
10881
10882
10883 static int
10884 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
10885 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10886 {
10887
10888 struct regcache *reg_cache = arm_insn_r->regcache;
10889 ULONGEST u_regval[2] = {0};
10890
10891 uint32_t reg_src1 = 0, reg_src2 = 0;
10892 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10893
10894 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10895 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10896
10897 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10898 {
10899 /* 1) Handle misc store, immediate offset. */
10900 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10901 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10902 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10903 regcache_raw_read_unsigned (reg_cache, reg_src1,
10904 &u_regval[0]);
10905 if (ARM_PC_REGNUM == reg_src1)
10906 {
10907 /* If R15 was used as Rn, its value is the current PC + 8. */
10908 u_regval[0] = u_regval[0] + 8;
10909 }
10910 offset_8 = (immed_high << 4) | immed_low;
10911 /* Calculate target store address. */
10912 if (14 == arm_insn_r->opcode)
10913 {
10914 tgt_mem_addr = u_regval[0] + offset_8;
10915 }
10916 else
10917 {
10918 tgt_mem_addr = u_regval[0] - offset_8;
10919 }
10920 if (ARM_RECORD_STRH == str_type)
10921 {
10922 record_buf_mem[0] = 2;
10923 record_buf_mem[1] = tgt_mem_addr;
10924 arm_insn_r->mem_rec_count = 1;
10925 }
10926 else if (ARM_RECORD_STRD == str_type)
10927 {
10928 record_buf_mem[0] = 4;
10929 record_buf_mem[1] = tgt_mem_addr;
10930 record_buf_mem[2] = 4;
10931 record_buf_mem[3] = tgt_mem_addr + 4;
10932 arm_insn_r->mem_rec_count = 2;
10933 }
10934 }
10935 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10936 {
10937 /* 2) Store, register offset. */
10938 /* Get Rm. */
10939 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10940 /* Get Rn. */
10941 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10942 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10943 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10944 if (15 == reg_src2)
10945 {
10946 /* If R15 was used as Rn, its value is the current PC + 8. */
10947 u_regval[0] = u_regval[0] + 8;
10948 }
10949 /* Calculate target store address, Rn +/- Rm, register offset. */
10950 if (12 == arm_insn_r->opcode)
10951 {
10952 tgt_mem_addr = u_regval[0] + u_regval[1];
10953 }
10954 else
10955 {
10956 tgt_mem_addr = u_regval[1] - u_regval[0];
10957 }
10958 if (ARM_RECORD_STRH == str_type)
10959 {
10960 record_buf_mem[0] = 2;
10961 record_buf_mem[1] = tgt_mem_addr;
10962 arm_insn_r->mem_rec_count = 1;
10963 }
10964 else if (ARM_RECORD_STRD == str_type)
10965 {
10966 record_buf_mem[0] = 4;
10967 record_buf_mem[1] = tgt_mem_addr;
10968 record_buf_mem[2] = 4;
10969 record_buf_mem[3] = tgt_mem_addr + 4;
10970 arm_insn_r->mem_rec_count = 2;
10971 }
10972 }
10973 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10974 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10975 {
10976 /* 3) Store, immediate pre-indexed. */
10977 /* 5) Store, immediate post-indexed. */
10978 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10979 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10980 offset_8 = (immed_high << 4) | immed_low;
10981 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10982 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10983 /* Calculate target store address, Rn +/- Rm, register offset. */
10984 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10985 {
10986 tgt_mem_addr = u_regval[0] + offset_8;
10987 }
10988 else
10989 {
10990 tgt_mem_addr = u_regval[0] - offset_8;
10991 }
10992 if (ARM_RECORD_STRH == str_type)
10993 {
10994 record_buf_mem[0] = 2;
10995 record_buf_mem[1] = tgt_mem_addr;
10996 arm_insn_r->mem_rec_count = 1;
10997 }
10998 else if (ARM_RECORD_STRD == str_type)
10999 {
11000 record_buf_mem[0] = 4;
11001 record_buf_mem[1] = tgt_mem_addr;
11002 record_buf_mem[2] = 4;
11003 record_buf_mem[3] = tgt_mem_addr + 4;
11004 arm_insn_r->mem_rec_count = 2;
11005 }
11006 /* Record Rn also as it changes. */
11007 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11008 arm_insn_r->reg_rec_count = 1;
11009 }
11010 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11011 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11012 {
11013 /* 4) Store, register pre-indexed. */
11014 /* 6) Store, register post-indexed. */
11015 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11016 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11017 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11018 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11019 /* Calculate target store address, Rn +/- Rm, register offset. */
11020 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11021 {
11022 tgt_mem_addr = u_regval[0] + u_regval[1];
11023 }
11024 else
11025 {
11026 tgt_mem_addr = u_regval[1] - u_regval[0];
11027 }
11028 if (ARM_RECORD_STRH == str_type)
11029 {
11030 record_buf_mem[0] = 2;
11031 record_buf_mem[1] = tgt_mem_addr;
11032 arm_insn_r->mem_rec_count = 1;
11033 }
11034 else if (ARM_RECORD_STRD == str_type)
11035 {
11036 record_buf_mem[0] = 4;
11037 record_buf_mem[1] = tgt_mem_addr;
11038 record_buf_mem[2] = 4;
11039 record_buf_mem[3] = tgt_mem_addr + 4;
11040 arm_insn_r->mem_rec_count = 2;
11041 }
11042 /* Record Rn also as it changes. */
11043 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11044 arm_insn_r->reg_rec_count = 1;
11045 }
11046 return 0;
11047 }
11048
11049 /* Handling ARM extension space insns. */
11050
11051 static int
11052 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11053 {
11054 int ret = 0; /* Return value: -1: record failure; 0: success. */
11055 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11056 uint32_t record_buf[8], record_buf_mem[8];
11057 uint32_t reg_src1 = 0;
11058 struct regcache *reg_cache = arm_insn_r->regcache;
11059 ULONGEST u_regval = 0;
11060
11061 gdb_assert (!INSN_RECORDED(arm_insn_r));
11062 /* Handle unconditional insn extension space. */
11063
11064 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11065 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11066 if (arm_insn_r->cond)
11067 {
11068 /* PLD has no effect on architectural state; it just affects
11069 the caches. */
11070 if (5 == ((opcode1 & 0xE0) >> 5))
11071 {
11072 /* BLX(1) */
11073 record_buf[0] = ARM_PS_REGNUM;
11074 record_buf[1] = ARM_LR_REGNUM;
11075 arm_insn_r->reg_rec_count = 2;
11076 }
11077 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11078 }
11079
11080
11081 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11082 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11083 {
11084 ret = -1;
11085 /* Undefined instruction on ARM V5; need to handle if later
11086 versions define it. */
11087 }
11088
11089 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11090 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11091 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11092
11093 /* Handle arithmetic insn extension space. */
11094 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11095 && !INSN_RECORDED(arm_insn_r))
11096 {
11097 /* Handle MLA(S) and MUL(S). */
11098 if (in_inclusive_range (insn_op1, 0U, 3U))
11099 {
11100 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11101 record_buf[1] = ARM_PS_REGNUM;
11102 arm_insn_r->reg_rec_count = 2;
11103 }
11104 else if (in_inclusive_range (insn_op1, 4U, 15U))
11105 {
11106 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11107 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11108 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11109 record_buf[2] = ARM_PS_REGNUM;
11110 arm_insn_r->reg_rec_count = 3;
11111 }
11112 }
11113
11114 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11115 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11116 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11117
11118 /* Handle control insn extension space. */
11119
11120 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11121 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11122 {
11123 if (!bit (arm_insn_r->arm_insn,25))
11124 {
11125 if (!bits (arm_insn_r->arm_insn, 4, 7))
11126 {
11127 if ((0 == insn_op1) || (2 == insn_op1))
11128 {
11129 /* MRS. */
11130 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11131 arm_insn_r->reg_rec_count = 1;
11132 }
11133 else if (1 == insn_op1)
11134 {
11135 /* CPSR is going to be changed. */
11136 record_buf[0] = ARM_PS_REGNUM;
11137 arm_insn_r->reg_rec_count = 1;
11138 }
11139 else if (3 == insn_op1)
11140 {
11141 /* SPSR is going to be changed. */
11142 /* We need to get SPSR value, which is yet to be done. */
11143 return -1;
11144 }
11145 }
11146 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11147 {
11148 if (1 == insn_op1)
11149 {
11150 /* BX. */
11151 record_buf[0] = ARM_PS_REGNUM;
11152 arm_insn_r->reg_rec_count = 1;
11153 }
11154 else if (3 == insn_op1)
11155 {
11156 /* CLZ. */
11157 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11158 arm_insn_r->reg_rec_count = 1;
11159 }
11160 }
11161 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11162 {
11163 /* BLX. */
11164 record_buf[0] = ARM_PS_REGNUM;
11165 record_buf[1] = ARM_LR_REGNUM;
11166 arm_insn_r->reg_rec_count = 2;
11167 }
11168 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11169 {
11170 /* QADD, QSUB, QDADD, QDSUB */
11171 record_buf[0] = ARM_PS_REGNUM;
11172 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11173 arm_insn_r->reg_rec_count = 2;
11174 }
11175 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11176 {
11177 /* BKPT. */
11178 record_buf[0] = ARM_PS_REGNUM;
11179 record_buf[1] = ARM_LR_REGNUM;
11180 arm_insn_r->reg_rec_count = 2;
11181
11182 /* Save SPSR also; how? */
11183 return -1;
11184 }
11185 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11186 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11187 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11188 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11189 )
11190 {
11191 if (0 == insn_op1 || 1 == insn_op1)
11192 {
11193 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11194 /* We don't optimize for SMULW<y>, where only
11195 Rd is needed. */
11196 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11197 record_buf[1] = ARM_PS_REGNUM;
11198 arm_insn_r->reg_rec_count = 2;
11199 }
11200 else if (2 == insn_op1)
11201 {
11202 /* SMLAL<x><y>. */
11203 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11204 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11205 arm_insn_r->reg_rec_count = 2;
11206 }
11207 else if (3 == insn_op1)
11208 {
11209 /* SMUL<x><y>. */
11210 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11211 arm_insn_r->reg_rec_count = 1;
11212 }
11213 }
11214 }
11215 else
11216 {
11217 /* MSR : immediate form. */
11218 if (1 == insn_op1)
11219 {
11220 /* CPSR is going to be changed. */
11221 record_buf[0] = ARM_PS_REGNUM;
11222 arm_insn_r->reg_rec_count = 1;
11223 }
11224 else if (3 == insn_op1)
11225 {
11226 /* SPSR is going to be changed. */
11227 /* We need to get SPSR value, which is yet to be done. */
11228 return -1;
11229 }
11230 }
11231 }
11232
11233 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11234 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11235 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11236
11237 /* Handle load/store insn extension space. */
11238
11239 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11240 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11241 && !INSN_RECORDED(arm_insn_r))
11242 {
11243 /* SWP/SWPB. */
11244 if (0 == insn_op1)
11245 {
11246 /* These insns change both a register and memory. */
11247 /* SWP or SWPB insn. */
11248 /* Get memory address given by Rn. */
11249 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11250 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11251 /* SWP insn?  It swaps a word. */
11252 if (8 == arm_insn_r->opcode)
11253 {
11254 record_buf_mem[0] = 4;
11255 }
11256 else
11257 {
11258 /* SWPB insn swaps only a byte. */
11259 record_buf_mem[0] = 1;
11260 }
11261 record_buf_mem[1] = u_regval;
11262 arm_insn_r->mem_rec_count = 1;
11263 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11264 arm_insn_r->reg_rec_count = 1;
11265 }
11266 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11267 {
11268 /* STRH. */
11269 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11270 ARM_RECORD_STRH);
11271 }
11272 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11273 {
11274 /* LDRD. */
11275 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11276 record_buf[1] = record_buf[0] + 1;
11277 arm_insn_r->reg_rec_count = 2;
11278 }
11279 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11280 {
11281 /* STRD. */
11282 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11283 ARM_RECORD_STRD);
11284 }
11285 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11286 {
11287 /* LDRH, LDRSB, LDRSH. */
11288 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11289 arm_insn_r->reg_rec_count = 1;
11290 }
11291
11292 }
11293
11294 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11295 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11296 && !INSN_RECORDED(arm_insn_r))
11297 {
11298 ret = -1;
11299 /* Handle coprocessor insn extension space. */
11300 }
11301
11302 /* To be done for ARMv5 and later; as of now we return -1. */
11303 if (-1 == ret)
11304 return ret;
11305
11306 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11307 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11308
11309 return ret;
11310 }
11311
11312 /* Handling opcode 000 insns. */
11313
11314 static int
11315 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11316 {
11317 struct regcache *reg_cache = arm_insn_r->regcache;
11318 uint32_t record_buf[8], record_buf_mem[8];
11319 ULONGEST u_regval[2] = {0};
11320
11321 uint32_t reg_src1 = 0;
11322 uint32_t opcode1 = 0;
11323
11324 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11325 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11326 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11327
11328 if (!((opcode1 & 0x19) == 0x10))
11329 {
11330 /* Data-processing (register) and data-processing (register-shifted
11331 register). */
11332 /* In all 11 shifter operand modes, the insn modifies the destination
11333 register, which is specified by the 13-16 decode field. */
11334 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11335 record_buf[1] = ARM_PS_REGNUM;
11336 arm_insn_r->reg_rec_count = 2;
11337 }
11338 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11339 {
11340 /* Miscellaneous instructions */
11341
11342 if (3 == arm_insn_r->decode && 0x12 == opcode1
11343 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11344 {
11345 /* Handle BLX, branch and link/exchange. */
11346 if (9 == arm_insn_r->opcode)
11347 {
11348 /* Branching is chosen by setting the T bit of CPSR from bit [0] of Rm,
11349 and R14 stores the return address. */
11350 record_buf[0] = ARM_PS_REGNUM;
11351 record_buf[1] = ARM_LR_REGNUM;
11352 arm_insn_r->reg_rec_count = 2;
11353 }
11354 }
11355 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11356 {
11357 /* Handle enhanced software breakpoint insn, BKPT. */
11358 /* CPSR is changed so that execution continues in ARM state, with
11359 normal interrupts disabled, entering abort mode. */
11360 /* PC is set according to the high vector configuration. */
11361 /* If the user hits a breakpoint and then reverses execution,
11362 we need to go back with the previous CPSR and
11363 program counter. */
11364 record_buf[0] = ARM_PS_REGNUM;
11365 record_buf[1] = ARM_LR_REGNUM;
11366 arm_insn_r->reg_rec_count = 2;
11367
11368 /* Save SPSR also; how? */
11369 return -1;
11370 }
11371 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11372 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11373 {
11374 /* Handle BX, branch and link/exchange. */
11375 /* Branching is chosen by setting the T bit of CPSR from bit [0] of Rm. */
11376 record_buf[0] = ARM_PS_REGNUM;
11377 arm_insn_r->reg_rec_count = 1;
11378 }
11379 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11380 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11381 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11382 {
11383 /* Count leading zeros: CLZ. */
11384 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11385 arm_insn_r->reg_rec_count = 1;
11386 }
11387 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11388 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11389 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11390 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11391 {
11392 /* Handle MRS insn. */
11393 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11394 arm_insn_r->reg_rec_count = 1;
11395 }
11396 }
11397 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11398 {
11399 /* Multiply and multiply-accumulate */
11400
11401 /* Handle multiply instructions. */
11402 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11403 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11404 {
11405 /* Handle MLA and MUL. */
11406 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11407 record_buf[1] = ARM_PS_REGNUM;
11408 arm_insn_r->reg_rec_count = 2;
11409 }
11410 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11411 {
11412 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11413 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11414 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11415 record_buf[2] = ARM_PS_REGNUM;
11416 arm_insn_r->reg_rec_count = 3;
11417 }
11418 }
11419 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11420 {
11421 /* Synchronization primitives */
11422
11423 /* Handling SWP, SWPB. */
11424 /* These insns change both a register and memory. */
11425 /* SWP or SWPB insn. */
11426
11427 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11428 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11429 /* SWP insn?  It swaps a word. */
11430 if (8 == arm_insn_r->opcode)
11431 {
11432 record_buf_mem[0] = 4;
11433 }
11434 else
11435 {
11436 /* SWPB insn swaps only a byte. */
11437 record_buf_mem[0] = 1;
11438 }
11439 record_buf_mem[1] = u_regval[0];
11440 arm_insn_r->mem_rec_count = 1;
11441 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11442 arm_insn_r->reg_rec_count = 1;
11443 }
11444 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11445 || 15 == arm_insn_r->decode)
11446 {
11447 if ((opcode1 & 0x12) == 2)
11448 {
11449 /* Extra load/store (unprivileged) */
11450 return -1;
11451 }
11452 else
11453 {
11454 /* Extra load/store */
11455 switch (bits (arm_insn_r->arm_insn, 5, 6))
11456 {
11457 case 1:
11458 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11459 {
11460 /* STRH (register), STRH (immediate) */
11461 arm_record_strx (arm_insn_r, &record_buf[0],
11462 &record_buf_mem[0], ARM_RECORD_STRH);
11463 }
11464 else if ((opcode1 & 0x05) == 0x1)
11465 {
11466 /* LDRH (register) */
11467 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11468 arm_insn_r->reg_rec_count = 1;
11469
11470 if (bit (arm_insn_r->arm_insn, 21))
11471 {
11472 /* Write back to Rn. */
11473 record_buf[arm_insn_r->reg_rec_count++]
11474 = bits (arm_insn_r->arm_insn, 16, 19);
11475 }
11476 }
11477 else if ((opcode1 & 0x05) == 0x5)
11478 {
11479 /* LDRH (immediate), LDRH (literal) */
11480 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11481
11482 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11483 arm_insn_r->reg_rec_count = 1;
11484
11485 if (rn != 15)
11486 {
11487 /* LDRH (immediate) */
11488 if (bit (arm_insn_r->arm_insn, 21))
11489 {
11490 /* Write back to Rn. */
11491 record_buf[arm_insn_r->reg_rec_count++] = rn;
11492 }
11493 }
11494 }
11495 else
11496 return -1;
11497 break;
11498 case 2:
11499 if ((opcode1 & 0x05) == 0x0)
11500 {
11501 /* LDRD (register) */
11502 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11503 record_buf[1] = record_buf[0] + 1;
11504 arm_insn_r->reg_rec_count = 2;
11505
11506 if (bit (arm_insn_r->arm_insn, 21))
11507 {
11508 /* Write back to Rn. */
11509 record_buf[arm_insn_r->reg_rec_count++]
11510 = bits (arm_insn_r->arm_insn, 16, 19);
11511 }
11512 }
11513 else if ((opcode1 & 0x05) == 0x1)
11514 {
11515 /* LDRSB (register) */
11516 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11517 arm_insn_r->reg_rec_count = 1;
11518
11519 if (bit (arm_insn_r->arm_insn, 21))
11520 {
11521 /* Write back to Rn. */
11522 record_buf[arm_insn_r->reg_rec_count++]
11523 = bits (arm_insn_r->arm_insn, 16, 19);
11524 }
11525 }
11526 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11527 {
11528 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11529 LDRSB (literal) */
11530 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11531
11532 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11533 arm_insn_r->reg_rec_count = 1;
11534
11535 if (rn != 15)
11536 {
11537 /* LDRD (immediate), LDRSB (immediate) */
11538 if (bit (arm_insn_r->arm_insn, 21))
11539 {
11540 /* Write back to Rn. */
11541 record_buf[arm_insn_r->reg_rec_count++] = rn;
11542 }
11543 }
11544 }
11545 else
11546 return -1;
11547 break;
11548 case 3:
11549 if ((opcode1 & 0x05) == 0x0)
11550 {
11551 /* STRD (register) */
11552 arm_record_strx (arm_insn_r, &record_buf[0],
11553 &record_buf_mem[0], ARM_RECORD_STRD);
11554 }
11555 else if ((opcode1 & 0x05) == 0x1)
11556 {
11557 /* LDRSH (register) */
11558 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11559 arm_insn_r->reg_rec_count = 1;
11560
11561 if (bit (arm_insn_r->arm_insn, 21))
11562 {
11563 /* Write back to Rn. */
11564 record_buf[arm_insn_r->reg_rec_count++]
11565 = bits (arm_insn_r->arm_insn, 16, 19);
11566 }
11567 }
11568 else if ((opcode1 & 0x05) == 0x4)
11569 {
11570 /* STRD (immediate) */
11571 arm_record_strx (arm_insn_r, &record_buf[0],
11572 &record_buf_mem[0], ARM_RECORD_STRD);
11573 }
11574 else if ((opcode1 & 0x05) == 0x5)
11575 {
11576 /* LDRSH (immediate), LDRSH (literal) */
11577 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11578 arm_insn_r->reg_rec_count = 1;
11579
11580 if (bit (arm_insn_r->arm_insn, 21))
11581 {
11582 /* Write back to Rn. */
11583 record_buf[arm_insn_r->reg_rec_count++]
11584 = bits (arm_insn_r->arm_insn, 16, 19);
11585 }
11586 }
11587 else
11588 return -1;
11589 break;
11590 default:
11591 return -1;
11592 }
11593 }
11594 }
11595 else
11596 {
11597 return -1;
11598 }
11599
11600 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11601 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11602 return 0;
11603 }
11604
11605 /* Handling opcode 001 insns. */
11606
11607 static int
11608 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11609 {
11610 uint32_t record_buf[8], record_buf_mem[8];
11611
11612 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11613 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11614
11615 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11616 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11617 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11618 )
11619 {
11620 /* Handle MSR insn. */
11621 if (9 == arm_insn_r->opcode)
11622 {
11623 /* CPSR is going to be changed. */
11624 record_buf[0] = ARM_PS_REGNUM;
11625 arm_insn_r->reg_rec_count = 1;
11626 }
11627 else
11628 {
11629 /* SPSR is going to be changed. */
11630 }
11631 }
11632 else if (arm_insn_r->opcode <= 15)
11633 {
11634 /* Normal data processing insns. */
11635 /* In all 11 shifter operand modes, the insn modifies the destination
11636 register, which is specified by the 13-16 decode field. */
11637 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11638 record_buf[1] = ARM_PS_REGNUM;
11639 arm_insn_r->reg_rec_count = 2;
11640 }
11641 else
11642 {
11643 return -1;
11644 }
11645
11646 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11647 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11648 return 0;
11649 }
11650
11651 static int
11652 arm_record_media (arm_insn_decode_record *arm_insn_r)
11653 {
11654 uint32_t record_buf[8];
11655
11656 switch (bits (arm_insn_r->arm_insn, 22, 24))
11657 {
11658 case 0:
11659 /* Parallel addition and subtraction, signed */
11660 case 1:
11661 /* Parallel addition and subtraction, unsigned */
11662 case 2:
11663 case 3:
11664 /* Packing, unpacking, saturation and reversal */
11665 {
11666 int rd = bits (arm_insn_r->arm_insn, 12, 15);
11667
11668 record_buf[arm_insn_r->reg_rec_count++] = rd;
11669 }
11670 break;
11671
11672 case 4:
11673 case 5:
11674 /* Signed multiplies */
11675 {
11676 int rd = bits (arm_insn_r->arm_insn, 16, 19);
11677 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
11678
11679 record_buf[arm_insn_r->reg_rec_count++] = rd;
11680 if (op1 == 0x0)
11681 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11682 else if (op1 == 0x4)
11683 record_buf[arm_insn_r->reg_rec_count++]
11684 = bits (arm_insn_r->arm_insn, 12, 15);
11685 }
11686 break;
11687
11688 case 6:
11689 {
11690 if (bit (arm_insn_r->arm_insn, 21)
11691 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
11692 {
11693 /* SBFX */
11694 record_buf[arm_insn_r->reg_rec_count++]
11695 = bits (arm_insn_r->arm_insn, 12, 15);
11696 }
11697 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
11698 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
11699 {
11700 /* USAD8 and USADA8 */
11701 record_buf[arm_insn_r->reg_rec_count++]
11702 = bits (arm_insn_r->arm_insn, 16, 19);
11703 }
11704 }
11705 break;
11706
11707 case 7:
11708 {
11709 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
11710 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
11711 {
11712 /* Permanently UNDEFINED */
11713 return -1;
11714 }
11715 else
11716 {
11717 /* BFC, BFI and UBFX */
11718 record_buf[arm_insn_r->reg_rec_count++]
11719 = bits (arm_insn_r->arm_insn, 12, 15);
11720 }
11721 }
11722 break;
11723
11724 default:
11725 return -1;
11726 }
11727
11728 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11729
11730 return 0;
11731 }
11732
11733 /* Handle ARM mode instructions with opcode 010. */
11734
11735 static int
11736 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
11737 {
11738 struct regcache *reg_cache = arm_insn_r->regcache;
11739
11740 uint32_t reg_base, reg_dest;
11741 uint32_t offset_12, tgt_mem_addr;
11742 uint32_t record_buf[8], record_buf_mem[8];
11743 unsigned char wback;
11744 ULONGEST u_regval;
11745
11746 /* Calculate wback. */
11747 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11748 || (bit (arm_insn_r->arm_insn, 21) == 1);
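/* I.e. writeback occurs for post-indexed addressing (P, bit 24, clear)
   or for pre-indexed addressing with W (bit 21) set.  */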
11749
11750 arm_insn_r->reg_rec_count = 0;
11751 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11752
11753 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11754 {
11755 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11756 and LDRT. */
11757
11758 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11759 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11760
11761 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11762 precedes an LDR instruction having R15 as the destination, it
11763 emulates a branch and link instruction, and hence we need to save
11764 CPSR and PC as well. */
11765 if (ARM_PC_REGNUM == reg_dest)
11766 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11767
11768 /* If wback is true, also save the base register, which is going to be
11769 written to. */
11770 if (wback)
11771 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11772 }
11773 else
11774 {
11775 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11776
11777 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11778 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11779
11780 /* Handle bit U. */
11781 if (bit (arm_insn_r->arm_insn, 23))
11782 {
11783 /* U == 1: Add the offset. */
11784 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11785 }
11786 else
11787 {
11788 /* U == 0: subtract the offset. */
11789 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11790 }
11791
11792 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11793 bytes. */
11794 if (bit (arm_insn_r->arm_insn, 22))
11795 {
11796 /* STRB and STRBT: 1 byte. */
11797 record_buf_mem[0] = 1;
11798 }
11799 else
11800 {
11801 /* STR and STRT: 4 bytes. */
11802 record_buf_mem[0] = 4;
11803 }
11804
11805 /* Handle bit P: record the computed address for offset/pre-indexed forms, else the unmodified base address (post-indexed). */
11806 if (bit (arm_insn_r->arm_insn, 24))
11807 record_buf_mem[1] = tgt_mem_addr;
11808 else
11809 record_buf_mem[1] = (uint32_t) u_regval;
11810
11811 arm_insn_r->mem_rec_count = 1;
11812
11813 /* If wback is true, also save the base register, which is going to be
11814 written to. */
11815 if (wback)
11816 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11817 }
11818
11819 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11820 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11821 return 0;
11822 }
11823
11824 /* Handling opcode 011 insns. */
11825
11826 static int
11827 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
11828 {
11829 struct regcache *reg_cache = arm_insn_r->regcache;
11830
11831 uint32_t shift_imm = 0;
11832 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11833 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11834 uint32_t record_buf[8], record_buf_mem[8];
11835
11836 LONGEST s_word;
11837 ULONGEST u_regval[2];
11838
11839 if (bit (arm_insn_r->arm_insn, 4))
11840 return arm_record_media (arm_insn_r);
11841
11842 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11843 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11844
11845 /* Handle enhanced store insns and the LDRD DSP insn; the order
11846 follows the addressing modes of the store insns, beginning with
11847 the STRH insn. */
11848
11849 /* LDR or STR? */
11850 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11851 {
11852 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11853 /* The LDR insn is capable of branching: if
11854 MOV LR, PC precedes an LDR insn whose destination is R15,
11855 it emulates a branch and link insn, and hence we
11856 need to save CPSR and PC as well. */
11857 if (15 != reg_dest)
11858 {
11859 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11860 arm_insn_r->reg_rec_count = 1;
11861 }
11862 else
11863 {
11864 record_buf[0] = reg_dest;
11865 record_buf[1] = ARM_PS_REGNUM;
11866 arm_insn_r->reg_rec_count = 2;
11867 }
11868 }
11869 else
11870 {
11871 if (! bits (arm_insn_r->arm_insn, 4, 11))
11872 {
11873 /* Store insn, register offset and register pre-indexed,
11874 register post-indexed. */
11875 /* Get Rm. */
11876 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11877 /* Get Rn. */
11878 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11879 regcache_raw_read_unsigned (reg_cache, reg_src1,
11880 &u_regval[0]);
11881 regcache_raw_read_unsigned (reg_cache, reg_src2,
11882 &u_regval[1]);
11883 if (15 == reg_src2)
11884 {
11885 /* If R15 was used as Rn, its value is the current PC + 8. */
11886 /* Pre-indexed mode doesn't reach here; illegal insn. */
11887 u_regval[0] = u_regval[0] + 8;
11888 }
11889 /* Calculate target store address, Rn +/- Rm, register offset. */
11890 /* U == 1. */
11891 if (bit (arm_insn_r->arm_insn, 23))
11892 {
11893 tgt_mem_addr = u_regval[0] + u_regval[1];
11894 }
11895 else
11896 {
11897 tgt_mem_addr = u_regval[1] - u_regval[0];
11898 }
11899
11900 switch (arm_insn_r->opcode)
11901 {
11902 /* STR. */
11903 case 8:
11904 case 12:
11905 /* STR. */
11906 case 9:
11907 case 13:
11908 /* STRT. */
11909 case 1:
11910 case 5:
11911 /* STR. */
11912 case 0:
11913 case 4:
11914 record_buf_mem[0] = 4;
11915 break;
11916
11917 /* STRB. */
11918 case 10:
11919 case 14:
11920 /* STRB. */
11921 case 11:
11922 case 15:
11923 /* STRBT. */
11924 case 3:
11925 case 7:
11926 /* STRB. */
11927 case 2:
11928 case 6:
11929 record_buf_mem[0] = 1;
11930 break;
11931
11932 default:
11933 gdb_assert_not_reached ("no decoding pattern found");
11934 break;
11935 }
11936 record_buf_mem[1] = tgt_mem_addr;
11937 arm_insn_r->mem_rec_count = 1;
11938
11939 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11940 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11941 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11942 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11943 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11944 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11945 )
11946 {
11947 /* Rn is going to be changed in pre-indexed mode and
11948 post-indexed mode as well. */
11949 record_buf[0] = reg_src2;
11950 arm_insn_r->reg_rec_count = 1;
11951 }
11952 }
11953 else
11954 {
11955 /* Store insn, scaled register offset; scaled pre-indexed. */
11956 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11957 /* Get Rm. */
11958 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11959 /* Get Rn. */
11960 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11961 /* Get shift_imm. */
11962 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11963 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11964 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11965 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11966 /* Offset_12 used as shift. */
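/* Bits 5..6 encode the shift type: 0 = LSL, 1 = LSR, 2 = ASR,
   3 = ROR (or RRX when shift_imm is zero).  */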
11967 switch (offset_12)
11968 {
11969 case 0:
11970 /* Offset_12 used as index. */
11971 offset_12 = u_regval[0] << shift_imm;
11972 break;
11973
11974 case 1:
11975 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11976 break;
11977
11978 case 2:
11979 if (!shift_imm)
11980 {
11981 if (bit (u_regval[0], 31))
11982 {
11983 offset_12 = 0xFFFFFFFF;
11984 }
11985 else
11986 {
11987 offset_12 = 0;
11988 }
11989 }
11990 else
11991 {
11992 /* This is an arithmetic shift. */
11993 offset_12 = s_word >> shift_imm;
11994 }
11995 break;
11996
11997 case 3:
11998 if (!shift_imm)
11999 {
12000 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12001 &u_regval[1]);
12002 /* Get C flag value and shift it by 31. */
12003 offset_12 = ((bit (u_regval[1], 29) << 31)
12004 | (u_regval[0] >> 1));
12005 }
12006 else
12007 {
12008 offset_12 = (u_regval[0] >> shift_imm)
12009 | (u_regval[0] <<
12010 (32 - shift_imm));
12011 }
12012 break;
12013
12014 default:
12015 gdb_assert_not_reached ("no decoding pattern found");
12016 break;
12017 }
12018
12019 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12020 /* bit U set. */
12021 if (bit (arm_insn_r->arm_insn, 23))
12022 {
12023 tgt_mem_addr = u_regval[1] + offset_12;
12024 }
12025 else
12026 {
12027 tgt_mem_addr = u_regval[1] - offset_12;
12028 }
12029
12030 switch (arm_insn_r->opcode)
12031 {
12032 /* STR. */
12033 case 8:
12034 case 12:
12035 /* STR. */
12036 case 9:
12037 case 13:
12038 /* STRT. */
12039 case 1:
12040 case 5:
12041 /* STR. */
12042 case 0:
12043 case 4:
12044 record_buf_mem[0] = 4;
12045 break;
12046
12047 /* STRB. */
12048 case 10:
12049 case 14:
12050 /* STRB. */
12051 case 11:
12052 case 15:
12053 /* STRBT. */
12054 case 3:
12055 case 7:
12056 /* STRB. */
12057 case 2:
12058 case 6:
12059 record_buf_mem[0] = 1;
12060 break;
12061
12062 default:
12063 gdb_assert_not_reached ("no decoding pattern found");
12064 break;
12065 }
12066 record_buf_mem[1] = tgt_mem_addr;
12067 arm_insn_r->mem_rec_count = 1;
12068
12069 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12070 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12071 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12072 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12073 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12074 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12075 )
12076 {
12077 /* Rn is going to be changed in register scaled pre-indexed
12078 mode, and scaled post-indexed mode. */
12079 record_buf[0] = reg_src2;
12080 arm_insn_r->reg_rec_count = 1;
12081 }
12082 }
12083 }
12084
12085 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12086 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12087 return 0;
12088 }
12089
12090 /* Handle ARM mode instructions with opcode 100. */
12091
12092 static int
12093 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12094 {
12095 struct regcache *reg_cache = arm_insn_r->regcache;
12096 uint32_t register_count = 0, register_bits;
12097 uint32_t reg_base, addr_mode;
12098 uint32_t record_buf[24], record_buf_mem[48];
12099 uint32_t wback;
12100 ULONGEST u_regval;
12101
12102 /* Fetch the list of registers. */
12103 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12104 arm_insn_r->reg_rec_count = 0;
12105
12106 /* Fetch the base register that contains the address we are loading data
12107 from or storing data to. */
12108 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12109
12110 /* Calculate wback. */
12111 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
12112
12113 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12114 {
12115 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12116
12117 /* Find out which registers are going to be loaded from memory. */
12118 while (register_bits)
12119 {
12120 if (register_bits & 0x00000001)
12121 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12122 register_bits = register_bits >> 1;
12123 register_count++;
12124 }
12125
12126
12127 /* If wback is true, also save the base register, which is going to be
12128 written to. */
12129 if (wback)
12130 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12131
12132 /* Save the CPSR register. */
12133 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12134 }
12135 else
12136 {
12137 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12138
12139 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12140
12141 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12142
12143 /* Find out how many registers are going to be stored to memory. */
12144 while (register_bits)
12145 {
12146 if (register_bits & 0x00000001)
12147 register_count++;
12148 register_bits = register_bits >> 1;
12149 }
12150
12151 switch (addr_mode)
12152 {
12153 /* STMDA (STMED): Decrement after. */
12154 case 0:
12155 record_buf_mem[1] = (uint32_t) u_regval
12156 - register_count * ARM_INT_REGISTER_SIZE + 4;
12157 break;
12158 /* STM (STMIA, STMEA): Increment after. */
12159 case 1:
12160 record_buf_mem[1] = (uint32_t) u_regval;
12161 break;
12162 /* STMDB (STMFD): Decrement before. */
12163 case 2:
12164 record_buf_mem[1] = (uint32_t) u_regval
12165 - register_count * ARM_INT_REGISTER_SIZE;
12166 break;
12167 /* STMIB (STMFA): Increment before. */
12168 case 3:
12169 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12170 break;
12171 default:
12172 gdb_assert_not_reached ("no decoding pattern found");
12173 break;
12174 }
12175
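/* After the switch, record_buf_mem[1] holds the lowest address written
   by the store-multiple, so a single memory record of
   register_count * ARM_INT_REGISTER_SIZE bytes covers the whole
   transfer.  */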
12176 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12177 arm_insn_r->mem_rec_count = 1;
12178
12179 /* If wback is true, also save the base register, which is going to be
12180 written to. */
12181 if (wback)
12182 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12183 }
12184
12185 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12186 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12187 return 0;
12188 }
12189
12190 /* Handling opcode 101 insns. */
12191
12192 static int
12193 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12194 {
12195 uint32_t record_buf[8];
12196
12197 /* Handle B, BL, BLX(1) insns. */
12198 /* B simply branches so we do nothing here. */
12199 /* Note: BLX(1) doesn't fall here but instead it falls into
12200 extension space. */
12201 if (bit (arm_insn_r->arm_insn, 24))
12202 {
12203 record_buf[0] = ARM_LR_REGNUM;
12204 arm_insn_r->reg_rec_count = 1;
12205 }
12206
12207 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12208
12209 return 0;
12210 }
12211
12212 static int
12213 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12214 {
12215 gdb_printf (gdb_stderr,
12216 _("Process record does not support instruction "
12217 "0x%0x at address %s.\n"), arm_insn_r->arm_insn,
12218 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12219
12220 return -1;
12221 }
12222
12223 /* Record handler for vector data transfer instructions. */
12224
12225 static int
12226 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12227 {
12228 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12229 uint32_t record_buf[4];
12230
12231 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12232 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12233 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12234 bit_l = bit (arm_insn_r->arm_insn, 20);
12235 bit_c = bit (arm_insn_r->arm_insn, 8);
12236
12237 /* Handle VMOV instruction. */
12238 if (bit_l && bit_c)
12239 {
12240 record_buf[0] = reg_t;
12241 arm_insn_r->reg_rec_count = 1;
12242 }
12243 else if (bit_l && !bit_c)
12244 {
12245 /* Handle VMOV instruction. */
12246 if (bits_a == 0x00)
12247 {
12248 record_buf[0] = reg_t;
12249 arm_insn_r->reg_rec_count = 1;
12250 }
12251 /* Handle VMRS instruction. */
12252 else if (bits_a == 0x07)
12253 {
12254 if (reg_t == 15)
12255 reg_t = ARM_PS_REGNUM;
12256
12257 record_buf[0] = reg_t;
12258 arm_insn_r->reg_rec_count = 1;
12259 }
12260 }
12261 else if (!bit_l && !bit_c)
12262 {
12263 /* Handle VMOV instruction. */
12264 if (bits_a == 0x00)
12265 {
12266 record_buf[0] = ARM_D0_REGNUM + reg_v;
12267
12268 arm_insn_r->reg_rec_count = 1;
12269 }
12270 /* Handle VMSR instruction. */
12271 else if (bits_a == 0x07)
12272 {
12273 record_buf[0] = ARM_FPSCR_REGNUM;
12274 arm_insn_r->reg_rec_count = 1;
12275 }
12276 }
12277 else if (!bit_l && bit_c)
12278 {
12279 /* Handle VMOV instruction. */
12280 if (!(bits_a & 0x04))
12281 {
12282 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12283 + ARM_D0_REGNUM;
12284 arm_insn_r->reg_rec_count = 1;
12285 }
12286 /* Handle VDUP instruction. */
12287 else
12288 {
12289 if (bit (arm_insn_r->arm_insn, 21))
12290 {
12291 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12292 record_buf[0] = reg_v + ARM_D0_REGNUM;
12293 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12294 arm_insn_r->reg_rec_count = 2;
12295 }
12296 else
12297 {
12298 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12299 record_buf[0] = reg_v + ARM_D0_REGNUM;
12300 arm_insn_r->reg_rec_count = 1;
12301 }
12302 }
12303 }
12304
12305 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12306 return 0;
12307 }
12308
12309 /* Record handler for extension register load/store instructions. */
12310
12311 static int
12312 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12313 {
12314 uint32_t opcode, single_reg;
12315 uint8_t op_vldm_vstm;
12316 uint32_t record_buf[8], record_buf_mem[128];
12317 ULONGEST u_regval = 0;
12318
12319 struct regcache *reg_cache = arm_insn_r->regcache;
12320
12321 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12322 single_reg = !bit (arm_insn_r->arm_insn, 8);
12323 op_vldm_vstm = opcode & 0x1b;
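/* Masking with 0x1b drops bit 2 of the 5-bit opcode field (insn bit 22,
   the D bit), so the VSTM/VLDM comparisons below do not depend on it.  */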
12324
12325 /* Handle VMOV instructions. */
12326 if ((opcode & 0x1e) == 0x04)
12327 {
12328 if (bit (arm_insn_r->arm_insn, 20)) /* Bit 20: transfer to ARM core registers. */
12329 {
12330 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12331 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12332 arm_insn_r->reg_rec_count = 2;
12333 }
12334 else
12335 {
12336 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12337 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12338
12339 if (single_reg)
12340 {
12341 /* The first S register number m is REG_M:M (M is bit 5),
12342 the corresponding D register number is REG_M:M / 2, which
12343 is REG_M. */
12344 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12345 /* The second S register number is REG_M:M + 1, the
12346 corresponding D register number is (REG_M:M + 1) / 2.
12347 IOW, if bit M is 1, the first and second S registers
12348 are mapped to different D registers, otherwise, they are
12349 in the same D register. */
12350 if (bit_m)
12351 {
12352 record_buf[arm_insn_r->reg_rec_count++]
12353 = ARM_D0_REGNUM + reg_m + 1;
12354 }
12355 }
12356 else
12357 {
12358 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12359 arm_insn_r->reg_rec_count = 1;
12360 }
12361 }
12362 }
12363 /* Handle VSTM and VPUSH instructions. */
12364 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12365 || op_vldm_vstm == 0x12)
12366 {
12367 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12368 uint32_t memory_index = 0;
12369
12370 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12371 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12372 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12373 imm_off32 = imm_off8 << 2;
12374 memory_count = imm_off8;
12375
12376 if (bit (arm_insn_r->arm_insn, 23))
12377 start_address = u_regval;
12378 else
12379 start_address = u_regval - imm_off32;
12380
12381 if (bit (arm_insn_r->arm_insn, 21))
12382 {
12383 record_buf[0] = reg_rn;
12384 arm_insn_r->reg_rec_count = 1;
12385 }
12386
12387 while (memory_count > 0)
12388 {
12389 if (single_reg)
12390 {
12391 record_buf_mem[memory_index] = 4;
12392 record_buf_mem[memory_index + 1] = start_address;
12393 start_address = start_address + 4;
12394 memory_index = memory_index + 2;
12395 }
12396 else
12397 {
12398 record_buf_mem[memory_index] = 4;
12399 record_buf_mem[memory_index + 1] = start_address;
12400 record_buf_mem[memory_index + 2] = 4;
12401 record_buf_mem[memory_index + 3] = start_address + 4;
12402 start_address = start_address + 8;
12403 memory_index = memory_index + 4;
12404 }
12405 memory_count--;
12406 }
12407 arm_insn_r->mem_rec_count = (memory_index >> 1);
12408 }
12409 /* Handle VLDM instructions. */
12410 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12411 || op_vldm_vstm == 0x13)
12412 {
12413 uint32_t reg_count, reg_vd;
12414 uint32_t reg_index = 0;
12415 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12416
12417 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12418 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12419
12420 /* REG_VD is the first D register number. If the instruction
12421 loads memory to S registers (SINGLE_REG is TRUE), the register
12422 number is (REG_VD << 1 | bit D), so the corresponding D
12423 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12424 if (!single_reg)
12425 reg_vd = reg_vd | (bit_d << 4);
12426
12427 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12428 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12429
12430 /* If the instruction loads memory to D register, REG_COUNT should
12431 be divided by 2, according to the ARM Architecture Reference
12432 Manual. If the instruction loads memory to S register, divide by
12433 2 as well because two S registers are mapped to D register. */
12434 reg_count = reg_count / 2;
12435 if (single_reg && bit_d)
12436 {
12437 /* Increase the register count if S register list starts from
12438 an odd number (bit d is one). */
12439 reg_count++;
12440 }
12441
12442 while (reg_count > 0)
12443 {
12444 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12445 reg_count--;
12446 }
12447 arm_insn_r->reg_rec_count = reg_index;
12448 }
12449 /* VSTR Vector store register. */
12450 else if ((opcode & 0x13) == 0x10)
12451 {
12452 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12453 uint32_t memory_index = 0;
12454
12455 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12456 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12457 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12458 imm_off32 = imm_off8 << 2;
12459
12460 if (bit (arm_insn_r->arm_insn, 23))
12461 start_address = u_regval + imm_off32;
12462 else
12463 start_address = u_regval - imm_off32;
12464
12465 if (single_reg)
12466 {
12467 record_buf_mem[memory_index] = 4;
12468 record_buf_mem[memory_index + 1] = start_address;
12469 arm_insn_r->mem_rec_count = 1;
12470 }
12471 else
12472 {
12473 record_buf_mem[memory_index] = 4;
12474 record_buf_mem[memory_index + 1] = start_address;
12475 record_buf_mem[memory_index + 2] = 4;
12476 record_buf_mem[memory_index + 3] = start_address + 4;
12477 arm_insn_r->mem_rec_count = 2;
12478 }
12479 }
12480 /* VLDR Vector load register. */
12481 else if ((opcode & 0x13) == 0x11)
12482 {
12483 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12484
12485 if (!single_reg)
12486 {
12487 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12488 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12489 }
12490 else
12491 {
12492 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12493 /* Record register D rather than pseudo register S. */
12494 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12495 }
12496 arm_insn_r->reg_rec_count = 1;
12497 }
12498
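/* REG_ALLOC and MEM_ALLOC (macros defined earlier in this file) save the
collected register numbers and (length, address) descriptors into the
instruction record used by process record. */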
12499 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12500 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12501 return 0;
12502 }
12503
12504 /* Record handler for arm/thumb mode VFP data processing instructions. */
12505
12506 static int
12507 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12508 {
12509 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12510 uint32_t record_buf[4];
12511 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12512 enum insn_types curr_insn_type = INSN_INV;
12513
12514 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12515 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12516 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12517 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12518 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12519 bit_d = bit (arm_insn_r->arm_insn, 22);
12520 /* Mask off the "D" bit. */
12521 opc1 = opc1 & ~0x04;
12522
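/* The INSN_T* classification below determines what gets recorded:
INSN_T0 records a pair of D registers, INSN_T1 a single D register,
INSN_T2 a single-precision S register, and INSN_T3 only FPSCR. */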
12523 /* Handle VMLA, VMLS. */
12524 if (opc1 == 0x00)
12525 {
12526 if (bit (arm_insn_r->arm_insn, 10))
12527 {
12528 if (bit (arm_insn_r->arm_insn, 6))
12529 curr_insn_type = INSN_T0;
12530 else
12531 curr_insn_type = INSN_T1;
12532 }
12533 else
12534 {
12535 if (dp_op_sz)
12536 curr_insn_type = INSN_T1;
12537 else
12538 curr_insn_type = INSN_T2;
12539 }
12540 }
12541 /* Handle VNMLA, VNMLS, VNMUL. */
12542 else if (opc1 == 0x01)
12543 {
12544 if (dp_op_sz)
12545 curr_insn_type = INSN_T1;
12546 else
12547 curr_insn_type = INSN_T2;
12548 }
12549 /* Handle VMUL. */
12550 else if (opc1 == 0x02 && !(opc3 & 0x01))
12551 {
12552 if (bit (arm_insn_r->arm_insn, 10))
12553 {
12554 if (bit (arm_insn_r->arm_insn, 6))
12555 curr_insn_type = INSN_T0;
12556 else
12557 curr_insn_type = INSN_T1;
12558 }
12559 else
12560 {
12561 if (dp_op_sz)
12562 curr_insn_type = INSN_T1;
12563 else
12564 curr_insn_type = INSN_T2;
12565 }
12566 }
12567 /* Handle VADD, VSUB. */
12568 else if (opc1 == 0x03)
12569 {
12570 if (!bit (arm_insn_r->arm_insn, 9))
12571 {
12572 if (bit (arm_insn_r->arm_insn, 6))
12573 curr_insn_type = INSN_T0;
12574 else
12575 curr_insn_type = INSN_T1;
12576 }
12577 else
12578 {
12579 if (dp_op_sz)
12580 curr_insn_type = INSN_T1;
12581 else
12582 curr_insn_type = INSN_T2;
12583 }
12584 }
12585 /* Handle VDIV. */
12586 else if (opc1 == 0x08)
12587 {
12588 if (dp_op_sz)
12589 curr_insn_type = INSN_T1;
12590 else
12591 curr_insn_type = INSN_T2;
12592 }
12593 /* Handle all other vfp data processing instructions. */
12594 else if (opc1 == 0x0b)
12595 {
12596 /* Handle VMOV. */
12597 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12598 {
12599 if (bit (arm_insn_r->arm_insn, 4))
12600 {
12601 if (bit (arm_insn_r->arm_insn, 6))
12602 curr_insn_type = INSN_T0;
12603 else
12604 curr_insn_type = INSN_T1;
12605 }
12606 else
12607 {
12608 if (dp_op_sz)
12609 curr_insn_type = INSN_T1;
12610 else
12611 curr_insn_type = INSN_T2;
12612 }
12613 }
12614 /* Handle VNEG and VABS. */
12615 else if ((opc2 == 0x01 && opc3 == 0x01)
12616 || (opc2 == 0x00 && opc3 == 0x03))
12617 {
12618 if (!bit (arm_insn_r->arm_insn, 11))
12619 {
12620 if (bit (arm_insn_r->arm_insn, 6))
12621 curr_insn_type = INSN_T0;
12622 else
12623 curr_insn_type = INSN_T1;
12624 }
12625 else
12626 {
12627 if (dp_op_sz)
12628 curr_insn_type = INSN_T1;
12629 else
12630 curr_insn_type = INSN_T2;
12631 }
12632 }
12633 /* Handle VSQRT. */
12634 else if (opc2 == 0x01 && opc3 == 0x03)
12635 {
12636 if (dp_op_sz)
12637 curr_insn_type = INSN_T1;
12638 else
12639 curr_insn_type = INSN_T2;
12640 }
12641 /* Handle VCVT. */
12642 else if (opc2 == 0x07 && opc3 == 0x03)
12643 {
12644 if (!dp_op_sz)
12645 curr_insn_type = INSN_T1;
12646 else
12647 curr_insn_type = INSN_T2;
12648 }
12649 else if (opc3 & 0x01)
12650 {
12651 /* Handle VCVT. */
12652 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12653 {
12654 if (!bit (arm_insn_r->arm_insn, 18))
12655 curr_insn_type = INSN_T2;
12656 else
12657 {
12658 if (dp_op_sz)
12659 curr_insn_type = INSN_T1;
12660 else
12661 curr_insn_type = INSN_T2;
12662 }
12663 }
12664 /* Handle VCVT. */
12665 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12666 {
12667 if (dp_op_sz)
12668 curr_insn_type = INSN_T1;
12669 else
12670 curr_insn_type = INSN_T2;
12671 }
12672 /* Handle VCVTB, VCVTT. */
12673 else if ((opc2 & 0x0e) == 0x02)
12674 curr_insn_type = INSN_T2;
12675 /* Handle VCMP, VCMPE. */
12676 else if ((opc2 & 0x0e) == 0x04)
12677 curr_insn_type = INSN_T3;
12678 }
12679 }
12680
12681 switch (curr_insn_type)
12682 {
12683 case INSN_T0:
12684 reg_vd = reg_vd | (bit_d << 4);
12685 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12686 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12687 arm_insn_r->reg_rec_count = 2;
12688 break;
12689
12690 case INSN_T1:
12691 reg_vd = reg_vd | (bit_d << 4);
12692 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12693 arm_insn_r->reg_rec_count = 1;
12694 break;
12695
12696 case INSN_T2:
12697 reg_vd = (reg_vd << 1) | bit_d;
12698 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12699 arm_insn_r->reg_rec_count = 1;
12700 break;
12701
12702 case INSN_T3:
12703 record_buf[0] = ARM_FPSCR_REGNUM;
12704 arm_insn_r->reg_rec_count = 1;
12705 break;
12706
12707 default:
12708 gdb_assert_not_reached ("no decoding pattern found");
12709 break;
12710 }
12711
12712 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12713 return 0;
12714 }
12715
12716 /* Handling opcode 110 insns. */
12717
12718 static int
12719 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
12720 {
12721 uint32_t op1, op1_ebit, coproc;
12722
12723 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12724 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12725 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12726
12727 if ((coproc & 0x0e) == 0x0a)
12728 {
12729 /* Handle extension register ld/st instructions. */
12730 if (!(op1 & 0x20))
12731 return arm_record_exreg_ld_st_insn (arm_insn_r);
12732
12733 /* 64-bit transfers between arm core and extension registers. */
12734 if ((op1 & 0x3e) == 0x04)
12735 return arm_record_exreg_ld_st_insn (arm_insn_r);
12736 }
12737 else
12738 {
12739 /* Handle coprocessor ld/st instructions. */
12740 if (!(op1 & 0x3a))
12741 {
12742 /* Store. */
12743 if (!op1_ebit)
12744 return arm_record_unsupported_insn (arm_insn_r);
12745 else
12746 /* Load. */
12747 return arm_record_unsupported_insn (arm_insn_r);
12748 }
12749
12750 /* Move to coprocessor from two arm core registers. */
12751 if (op1 == 0x4)
12752 return arm_record_unsupported_insn (arm_insn_r);
12753
12754 /* Move to two arm core registers from coprocessor. */
12755 if (op1 == 0x5)
12756 {
12757 uint32_t reg_t[2];
12758
12759 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12760 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12761 arm_insn_r->reg_rec_count = 2;
12762
12763 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12764 return 0;
12765 }
12766 }
12767 return arm_record_unsupported_insn (arm_insn_r);
12768 }
12769
12770 /* Handling opcode 111 insns. */
12771
12772 static int
12773 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
12774 {
12775 uint32_t op, op1_ebit, coproc, bits_24_25;
12776 arm_gdbarch_tdep *tdep
12777 = (arm_gdbarch_tdep *) gdbarch_tdep (arm_insn_r->gdbarch);
12778 struct regcache *reg_cache = arm_insn_r->regcache;
12779
12780 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12781 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12782 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12783 op = bit (arm_insn_r->arm_insn, 4);
12784 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
12785
12786 /* Handle arm SWI/SVC system call instructions. */
12787 if (bits_24_25 == 0x3)
12788 {
12789 if (tdep->arm_syscall_record != NULL)
12790 {
12791 ULONGEST svc_operand, svc_number;
12792
12793 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12794
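/* Under the old OABI the syscall number is encoded in the SVC immediate
(biased by 0x900000); under EABI the immediate is zero and the syscall
number is passed in r7. */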
12795 if (svc_operand) /* OABI. */
12796 svc_number = svc_operand - 0x900000;
12797 else /* EABI. */
12798 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12799
12800 return tdep->arm_syscall_record (reg_cache, svc_number);
12801 }
12802 else
12803 {
12804 gdb_printf (gdb_stderr, _("no syscall record support\n"));
12805 return -1;
12806 }
12807 }
12808 else if (bits_24_25 == 0x02)
12809 {
12810 if (op)
12811 {
12812 if ((coproc & 0x0e) == 0x0a)
12813 {
12814 /* 8, 16, and 32-bit transfer */
12815 return arm_record_vdata_transfer_insn (arm_insn_r);
12816 }
12817 else
12818 {
12819 if (op1_ebit)
12820 {
12821 /* MRC, MRC2 */
12822 uint32_t record_buf[1];
12823
12824 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12825 if (record_buf[0] == 15)
12826 record_buf[0] = ARM_PS_REGNUM;
12827
12828 arm_insn_r->reg_rec_count = 1;
12829 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12830 record_buf);
12831 return 0;
12832 }
12833 else
12834 {
12835 /* MCR, MCR2 */
12836 return -1;
12837 }
12838 }
12839 }
12840 else
12841 {
12842 if ((coproc & 0x0e) == 0x0a)
12843 {
12844 /* VFP data-processing instructions. */
12845 return arm_record_vfp_data_proc_insn (arm_insn_r);
12846 }
12847 else
12848 {
12849 /* CDP, CDP2 */
12850 return -1;
12851 }
12852 }
12853 }
12854 else
12855 {
12856 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
12857
12858 if (op1 == 5)
12859 {
12860 if ((coproc & 0x0e) != 0x0a)
12861 {
12862 /* MRRC, MRRC2 */
12863 return -1;
12864 }
12865 }
12866 else if (op1 == 4 || op1 == 5)
12867 {
12868 if ((coproc & 0x0e) == 0x0a)
12869 {
12870 /* 64-bit transfers between ARM core and extension registers. */
12871 return -1;
12872 }
12873 else if (op1 == 4)
12874 {
12875 /* MCRR, MCRR2 */
12876 return -1;
12877 }
12878 }
12879 else if (op1 == 0 || op1 == 1)
12880 {
12881 /* UNDEFINED */
12882 return -1;
12883 }
12884 else
12885 {
12886 if ((coproc & 0x0e) == 0x0a)
12887 {
12888 /* Extension register load/store */
12889 }
12890 else
12891 {
12892 /* STC, STC2, LDC, LDC2 */
12893 }
12894 return -1;
12895 }
12896 }
12897
12898 return -1;
12899 }
12900
12901 /* Handling opcode 000 insns. */
12902
12903 static int
12904 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
12905 {
12906 uint32_t record_buf[8];
12907 uint32_t reg_src1 = 0;
12908
12909 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12910
12911 record_buf[0] = ARM_PS_REGNUM;
12912 record_buf[1] = reg_src1;
12913 thumb_insn_r->reg_rec_count = 2;
12914
12915 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12916
12917 return 0;
12918 }
12919
12920
12921 /* Handling opcode 001 insns. */
12922
12923 static int
12924 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
12925 {
12926 uint32_t record_buf[8];
12927 uint32_t reg_src1 = 0;
12928
12929 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12930
12931 record_buf[0] = ARM_PS_REGNUM;
12932 record_buf[1] = reg_src1;
12933 thumb_insn_r->reg_rec_count = 2;
12934
12935 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12936
12937 return 0;
12938 }
12939
12940 /* Handling opcode 010 insns. */
12941
12942 static int
12943 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
12944 {
12945 struct regcache *reg_cache = thumb_insn_r->regcache;
12946 uint32_t record_buf[8], record_buf_mem[8];
12947
12948 uint32_t reg_src1 = 0, reg_src2 = 0;
12949 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12950
12951 ULONGEST u_regval[2] = {0};
12952
12953 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12954
12955 if (bit (thumb_insn_r->arm_insn, 12))
12956 {
12957 /* Handle load/store register offset. */
12958 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
12959
12960 if (in_inclusive_range (opB, 4U, 7U))
12961 {
12962 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
12963 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12964 record_buf[0] = reg_src1;
12965 thumb_insn_r->reg_rec_count = 1;
12966 }
12967 else if (in_inclusive_range (opB, 0U, 2U))
12968 {
12969 /* STR(2), STRB(2), STRH(2). */
12970 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12971 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12972 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12973 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12974 if (0 == opB)
12975 record_buf_mem[0] = 4; /* STR (2). */
12976 else if (2 == opB)
12977 record_buf_mem[0] = 1; /* STRB (2). */
12978 else if (1 == opB)
12979 record_buf_mem[0] = 2; /* STRH (2). */
12980 record_buf_mem[1] = u_regval[0] + u_regval[1];
12981 thumb_insn_r->mem_rec_count = 1;
12982 }
12983 }
12984 else if (bit (thumb_insn_r->arm_insn, 11))
12985 {
12986 /* Handle load from literal pool. */
12987 /* LDR(3). */
12988 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12989 record_buf[0] = reg_src1;
12990 thumb_insn_r->reg_rec_count = 1;
12991 }
12992 else if (opcode1)
12993 {
12994 /* Special data instructions and branch and exchange */
12995 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12996 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12997 if ((3 == opcode2) && (!opcode3))
12998 {
12999 /* Branch with exchange. */
13000 record_buf[0] = ARM_PS_REGNUM;
13001 thumb_insn_r->reg_rec_count = 1;
13002 }
13003 else
13004 {
13005 /* Format 8; special data processing insns. */
13006 record_buf[0] = ARM_PS_REGNUM;
13007 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13008 | bits (thumb_insn_r->arm_insn, 0, 2));
13009 thumb_insn_r->reg_rec_count = 2;
13010 }
13011 }
13012 else
13013 {
13014 /* Format 5; data processing insns. */
13015 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13016 if (bit (thumb_insn_r->arm_insn, 7))
13017 {
13018 reg_src1 = reg_src1 + 8;
13019 }
13020 record_buf[0] = ARM_PS_REGNUM;
13021 record_buf[1] = reg_src1;
13022 thumb_insn_r->reg_rec_count = 2;
13023 }
13024
13025 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13026 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13027 record_buf_mem);
13028
13029 return 0;
13030 }
13031
13032 /* Handling opcode 011 insns. */
13033
13034 static int
13035 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13036 {
13037 struct regcache *reg_cache = thumb_insn_r->regcache;
13038 uint32_t record_buf[8], record_buf_mem[8];
13039
13040 uint32_t reg_src1 = 0;
13041 uint32_t opcode = 0, immed_5 = 0;
13042
13043 ULONGEST u_regval = 0;
13044
13045 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13046
13047 if (opcode)
13048 {
13049 /* LDR(1). */
13050 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13051 record_buf[0] = reg_src1;
13052 thumb_insn_r->reg_rec_count = 1;
13053 }
13054 else
13055 {
13056 /* STR(1). */
13057 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13058 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13059 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13060 record_buf_mem[0] = 4;
13061 record_buf_mem[1] = u_regval + (immed_5 * 4);
13062 thumb_insn_r->mem_rec_count = 1;
13063 }
13064
13065 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13066 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13067 record_buf_mem);
13068
13069 return 0;
13070 }
13071
13072 /* Handling opcode 100 insns. */
13073
13074 static int
13075 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13076 {
13077 struct regcache *reg_cache = thumb_insn_r->regcache;
13078 uint32_t record_buf[8], record_buf_mem[8];
13079
13080 uint32_t reg_src1 = 0;
13081 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13082
13083 ULONGEST u_regval = 0;
13084
13085 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13086
13087 if (3 == opcode)
13088 {
13089 /* LDR(4). */
13090 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13091 record_buf[0] = reg_src1;
13092 thumb_insn_r->reg_rec_count = 1;
13093 }
13094 else if (1 == opcode)
13095 {
13096 /* LDRH(1). */
13097 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13098 record_buf[0] = reg_src1;
13099 thumb_insn_r->reg_rec_count = 1;
13100 }
13101 else if (2 == opcode)
13102 {
13103 /* STR(3). */
13104 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13105 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13106 record_buf_mem[0] = 4;
13107 record_buf_mem[1] = u_regval + (immed_8 * 4);
13108 thumb_insn_r->mem_rec_count = 1;
13109 }
13110 else if (0 == opcode)
13111 {
13112 /* STRH(1). */
13113 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13114 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13115 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13116 record_buf_mem[0] = 2;
13117 record_buf_mem[1] = u_regval + (immed_5 * 2);
13118 thumb_insn_r->mem_rec_count = 1;
13119 }
13120
13121 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13122 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13123 record_buf_mem);
13124
13125 return 0;
13126 }
13127
13128 /* Handling opcode 101 insns. */
13129
13130 static int
13131 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13132 {
13133 struct regcache *reg_cache = thumb_insn_r->regcache;
13134
13135 uint32_t opcode = 0;
13136 uint32_t register_bits = 0, register_count = 0;
13137 uint32_t index = 0, start_address = 0;
13138 uint32_t record_buf[24], record_buf_mem[48];
13139 uint32_t reg_src1;
13140
13141 ULONGEST u_regval = 0;
13142
13143 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13144
13145 if (opcode == 0 || opcode == 1)
13146 {
13147 /* ADR and ADD (SP plus immediate) */
13148
13149 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13150 record_buf[0] = reg_src1;
13151 thumb_insn_r->reg_rec_count = 1;
13152 }
13153 else
13154 {
13155 /* Miscellaneous 16-bit instructions */
13156 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13157
13158 switch (opcode2)
13159 {
13160 case 6:
13161 /* SETEND and CPS */
13162 break;
13163 case 0:
13164 /* ADD/SUB (SP plus immediate) */
13165 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13166 record_buf[0] = ARM_SP_REGNUM;
13167 thumb_insn_r->reg_rec_count = 1;
13168 break;
13169 case 1: /* fall through */
13170 case 3: /* fall through */
13171 case 9: /* fall through */
13172 case 11:
13173 /* CBNZ, CBZ */
13174 break;
13175 case 2:
13176 /* SXTH, SXTB, UXTH, UXTB */
13177 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13178 thumb_insn_r->reg_rec_count = 1;
13179 break;
13180 case 4: /* fall through */
13181 case 5:
13182 /* PUSH. */
13183 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13184 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13185 while (register_bits)
13186 {
13187 if (register_bits & 0x00000001)
13188 register_count++;
13189 register_bits = register_bits >> 1;
13190 }
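/* Bit 8 is the M bit of the Thumb PUSH encoding: when set, LR is pushed
in addition to the registers in the list, so it adds one more word to
the stack adjustment below. */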
13191 start_address = u_regval -
13192 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
13193 thumb_insn_r->mem_rec_count = register_count;
13194 while (register_count)
13195 {
13196 record_buf_mem[(register_count * 2) - 1] = start_address;
13197 record_buf_mem[(register_count * 2) - 2] = 4;
13198 start_address = start_address + 4;
13199 register_count--;
13200 }
13201 record_buf[0] = ARM_SP_REGNUM;
13202 thumb_insn_r->reg_rec_count = 1;
13203 break;
13204 case 10:
13205 /* REV, REV16, REVSH */
13206 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13207 thumb_insn_r->reg_rec_count = 1;
13208 break;
13209 case 12: /* fall through */
13210 case 13:
13211 /* POP. */
13212 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13213 while (register_bits)
13214 {
13215 if (register_bits & 0x00000001)
13216 record_buf[index++] = register_count;
13217 register_bits = register_bits >> 1;
13218 register_count++;
13219 }
13220 record_buf[index++] = ARM_PS_REGNUM;
13221 record_buf[index++] = ARM_SP_REGNUM;
13222 thumb_insn_r->reg_rec_count = index;
13223 break;
13224 case 0xe:
13225 /* BKPT insn: an enhanced software breakpoint. */
13226 /* On the hardware, CPSR is changed so that execution continues in ARM
13227 state with normal interrupts disabled and abort mode entered, and the
13228 PC is set according to the high vector configuration. */
13229 /* When the user hits this breakpoint and then reverse-executes, we need
13230 to go back with the previous CPSR and program counter, so record
13231 them here. */
13232 record_buf[0] = ARM_PS_REGNUM;
13233 record_buf[1] = ARM_LR_REGNUM;
13234 thumb_insn_r->reg_rec_count = 2;
13235 /* We need to save SPSR value, which is not yet done. */
13236 gdb_printf (gdb_stderr,
13237 _("Process record does not support instruction "
13238 "0x%0x at address %s.\n"),
13239 thumb_insn_r->arm_insn,
13240 paddress (thumb_insn_r->gdbarch,
13241 thumb_insn_r->this_addr));
13242 return -1;
13243
13244 case 0xf:
13245 /* If-Then, and hints */
13246 break;
13247 default:
13248 return -1;
13249 };
13250 }
13251
13252 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13253 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13254 record_buf_mem);
13255
13256 return 0;
13257 }
13258
13259 /* Handling opcode 110 insns. */
13260
13261 static int
13262 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13263 {
13264 arm_gdbarch_tdep *tdep
13265 = (arm_gdbarch_tdep *) gdbarch_tdep (thumb_insn_r->gdbarch);
13266 struct regcache *reg_cache = thumb_insn_r->regcache;
13267
13268 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
13269 uint32_t reg_src1 = 0;
13270 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13271 uint32_t index = 0, start_address = 0;
13272 uint32_t record_buf[24], record_buf_mem[48];
13273
13274 ULONGEST u_regval = 0;
13275
13276 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13277 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13278
13279 if (1 == opcode2)
13280 {
13281
13282 /* LDMIA. */
13283 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13284 /* Get Rn. */
13285 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13286 while (register_bits)
13287 {
13288 if (register_bits & 0x00000001)
13289 record_buf[index++] = register_count;
13290 register_bits = register_bits >> 1;
13291 register_count++;
13292 }
13293 record_buf[index++] = reg_src1;
13294 thumb_insn_r->reg_rec_count = index;
13295 }
13296 else if (0 == opcode2)
13297 {
13298 /* Handle STMIA. */
13299 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13300 /* Get Rn. */
13301 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13302 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13303 while (register_bits)
13304 {
13305 if (register_bits & 0x00000001)
13306 register_count++;
13307 register_bits = register_bits >> 1;
13308 }
13309 start_address = u_regval;
13310 thumb_insn_r->mem_rec_count = register_count;
13311 while (register_count)
13312 {
13313 record_buf_mem[(register_count * 2) - 1] = start_address;
13314 record_buf_mem[(register_count * 2) - 2] = 4;
13315 start_address = start_address + 4;
13316 register_count--;
13317 }
13318 }
13319 else if (0x1F == opcode1)
13320 {
13321 /* Handle arm syscall insn. */
13322 if (tdep->arm_syscall_record != NULL)
13323 {
13324 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13325 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13326 }
13327 else
13328 {
13329 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13330 return -1;
13331 }
13332 }
13333
13334 /* B (1), conditional branch is automatically taken care of in
13335 process_record, as the PC is saved there. */
13336
13337 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13338 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13339 record_buf_mem);
13340
13341 return ret;
13342 }
13343
13344 /* Handling opcode 111 insns. */
13345
13346 static int
13347 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13348 {
13349 uint32_t record_buf[8];
13350 uint32_t bits_h = 0;
13351
13352 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13353
13354 if (2 == bits_h || 3 == bits_h)
13355 {
13356 /* BL */
13357 record_buf[0] = ARM_LR_REGNUM;
13358 thumb_insn_r->reg_rec_count = 1;
13359 }
13360 else if (1 == bits_h)
13361 {
13362 /* BLX(1). */
13363 record_buf[0] = ARM_PS_REGNUM;
13364 record_buf[1] = ARM_LR_REGNUM;
13365 thumb_insn_r->reg_rec_count = 2;
13366 }
13367
13368 /* B(2) is automatically taken care of in process_record, as the PC
13369 is saved there. */
13370
13371 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13372
13373 return 0;
13374 }
13375
13376 /* Handler for thumb2 load/store multiple instructions. */
13377
13378 static int
13379 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13380 {
13381 struct regcache *reg_cache = thumb2_insn_r->regcache;
13382
13383 uint32_t reg_rn, op;
13384 uint32_t register_bits = 0, register_count = 0;
13385 uint32_t index = 0, start_address = 0;
13386 uint32_t record_buf[24], record_buf_mem[48];
13387
13388 ULONGEST u_regval = 0;
13389
13390 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13391 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13392
13393 if (0 == op || 3 == op)
13394 {
13395 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13396 {
13397 /* Handle RFE instruction. */
13398 record_buf[0] = ARM_PS_REGNUM;
13399 thumb2_insn_r->reg_rec_count = 1;
13400 }
13401 else
13402 {
13403 /* Handle SRS instruction after reading banked SP. */
13404 return arm_record_unsupported_insn (thumb2_insn_r);
13405 }
13406 }
13407 else if (1 == op || 2 == op)
13408 {
13409 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13410 {
13411 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13412 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13413 while (register_bits)
13414 {
13415 if (register_bits & 0x00000001)
13416 record_buf[index++] = register_count;
13417
13418 register_count++;
13419 register_bits = register_bits >> 1;
13420 }
13421 record_buf[index++] = reg_rn;
13422 record_buf[index++] = ARM_PS_REGNUM;
13423 thumb2_insn_r->reg_rec_count = index;
13424 }
13425 else
13426 {
13427 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13428 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13429 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13430 while (register_bits)
13431 {
13432 if (register_bits & 0x00000001)
13433 register_count++;
13434
13435 register_bits = register_bits >> 1;
13436 }
13437
13438 if (1 == op)
13439 {
13440 /* Start address calculation for STMIA/STMEA. */
13441 start_address = u_regval;
13442 }
13443 else if (2 == op)
13444 {
13445 /* Start address calculation for STMDB/STMFD. */
13446 start_address = u_regval - register_count * 4;
13447 }
13448
13449 thumb2_insn_r->mem_rec_count = register_count;
13450 while (register_count)
13451 {
13452 record_buf_mem[register_count * 2 - 1] = start_address;
13453 record_buf_mem[register_count * 2 - 2] = 4;
13454 start_address = start_address + 4;
13455 register_count--;
13456 }
13457 record_buf[0] = reg_rn;
13458 record_buf[1] = ARM_PS_REGNUM;
13459 thumb2_insn_r->reg_rec_count = 2;
13460 }
13461 }
13462
13463 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13464 record_buf_mem);
13465 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13466 record_buf);
13467 return ARM_RECORD_SUCCESS;
13468 }
13469
13470 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13471 instructions. */
13472
13473 static int
13474 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13475 {
13476 struct regcache *reg_cache = thumb2_insn_r->regcache;
13477
13478 uint32_t reg_rd, reg_rn, offset_imm;
13479 uint32_t reg_dest1, reg_dest2;
13480 uint32_t address, offset_addr;
13481 uint32_t record_buf[8], record_buf_mem[8];
13482 uint32_t op1, op2, op3;
13483
13484 ULONGEST u_regval[2];
13485
13486 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13487 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13488 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
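/* OP1 (bits 23-24), OP2 (bits 20-21) and OP3 (bits 4-7) select among the
load/store exclusive, load/store dual and table branch forms handled
below. */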
13489
13490 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13491 {
13492 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13493 {
13494 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13495 record_buf[0] = reg_dest1;
13496 record_buf[1] = ARM_PS_REGNUM;
13497 thumb2_insn_r->reg_rec_count = 2;
13498 }
13499
13500 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13501 {
13502 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13503 record_buf[2] = reg_dest2;
13504 thumb2_insn_r->reg_rec_count = 3;
13505 }
13506 }
13507 else
13508 {
13509 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13510 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13511
13512 if (0 == op1 && 0 == op2)
13513 {
13514 /* Handle STREX. */
13515 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13516 address = u_regval[0] + (offset_imm * 4);
13517 record_buf_mem[0] = 4;
13518 record_buf_mem[1] = address;
13519 thumb2_insn_r->mem_rec_count = 1;
13520 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13521 record_buf[0] = reg_rd;
13522 thumb2_insn_r->reg_rec_count = 1;
13523 }
13524 else if (1 == op1 && 0 == op2)
13525 {
13526 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13527 record_buf[0] = reg_rd;
13528 thumb2_insn_r->reg_rec_count = 1;
13529 address = u_regval[0];
13530 record_buf_mem[1] = address;
13531
13532 if (4 == op3)
13533 {
13534 /* Handle STREXB. */
13535 record_buf_mem[0] = 1;
13536 thumb2_insn_r->mem_rec_count = 1;
13537 }
13538 else if (5 == op3)
13539 {
13540 /* Handle STREXH. */
13541 record_buf_mem[0] = 2;
13542 thumb2_insn_r->mem_rec_count = 1;
13543 }
13544 else if (7 == op3)
13545 {
13546 /* Handle STREXD. */
13547 address = u_regval[0];
13548 record_buf_mem[0] = 4;
13549 record_buf_mem[2] = 4;
13550 record_buf_mem[3] = address + 4;
13551 thumb2_insn_r->mem_rec_count = 2;
13552 }
13553 }
13554 else
13555 {
13556 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13557
13558 if (bit (thumb2_insn_r->arm_insn, 24))
13559 {
13560 if (bit (thumb2_insn_r->arm_insn, 23))
13561 offset_addr = u_regval[0] + (offset_imm * 4);
13562 else
13563 offset_addr = u_regval[0] - (offset_imm * 4);
13564
13565 address = offset_addr;
13566 }
13567 else
13568 address = u_regval[0];
13569
13570 record_buf_mem[0] = 4;
13571 record_buf_mem[1] = address;
13572 record_buf_mem[2] = 4;
13573 record_buf_mem[3] = address + 4;
13574 thumb2_insn_r->mem_rec_count = 2;
13575 record_buf[0] = reg_rn;
13576 thumb2_insn_r->reg_rec_count = 1;
13577 }
13578 }
13579
13580 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13581 record_buf);
13582 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13583 record_buf_mem);
13584 return ARM_RECORD_SUCCESS;
13585 }
13586
13587 /* Handler for thumb2 data processing (shift register and modified immediate)
13588 instructions. */
13589
13590 static int
13591 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13592 {
13593 uint32_t reg_rd, op;
13594 uint32_t record_buf[8];
13595
13596 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13597 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13598
13599 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13600 {
13601 record_buf[0] = ARM_PS_REGNUM;
13602 thumb2_insn_r->reg_rec_count = 1;
13603 }
13604 else
13605 {
13606 record_buf[0] = reg_rd;
13607 record_buf[1] = ARM_PS_REGNUM;
13608 thumb2_insn_r->reg_rec_count = 2;
13609 }
13610
13611 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13612 record_buf);
13613 return ARM_RECORD_SUCCESS;
13614 }
13615
13616 /* Generic handler for thumb2 instructions which affect the destination and
13617 PS registers. */
13618
13619 static int
13620 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13621 {
13622 uint32_t reg_rd;
13623 uint32_t record_buf[8];
13624
13625 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13626
13627 record_buf[0] = reg_rd;
13628 record_buf[1] = ARM_PS_REGNUM;
13629 thumb2_insn_r->reg_rec_count = 2;
13630
13631 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13632 record_buf);
13633 return ARM_RECORD_SUCCESS;
13634 }
13635
13636 /* Handler for thumb2 branch and miscellaneous control instructions. */
13637
13638 static int
13639 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
13640 {
13641 uint32_t op, op1, op2;
13642 uint32_t record_buf[8];
13643
13644 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13645 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13646 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13647
13648 /* Handle MSR insn. */
13649 if (!(op1 & 0x2) && 0x38 == op)
13650 {
13651 if (!(op2 & 0x3))
13652 {
13653 /* CPSR is going to be changed. */
13654 record_buf[0] = ARM_PS_REGNUM;
13655 thumb2_insn_r->reg_rec_count = 1;
13656 }
13657 else
13658 {
13659 arm_record_unsupported_insn (thumb2_insn_r);
13660 return -1;
13661 }
13662 }
13663 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13664 {
13665 /* BLX. */
13666 record_buf[0] = ARM_PS_REGNUM;
13667 record_buf[1] = ARM_LR_REGNUM;
13668 thumb2_insn_r->reg_rec_count = 2;
13669 }
13670
13671 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13672 record_buf);
13673 return ARM_RECORD_SUCCESS;
13674 }
13675
13676 /* Handler for thumb2 store single data item instructions. */
13677
13678 static int
13679 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
13680 {
13681 struct regcache *reg_cache = thumb2_insn_r->regcache;
13682
13683 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13684 uint32_t address, offset_addr;
13685 uint32_t record_buf[8], record_buf_mem[8];
13686 uint32_t op1, op2;
13687
13688 ULONGEST u_regval[2];
13689
13690 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13691 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13692 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13693 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13694
13695 if (bit (thumb2_insn_r->arm_insn, 23))
13696 {
13697 /* T2 encoding. */
13698 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13699 offset_addr = u_regval[0] + offset_imm;
13700 address = offset_addr;
13701 }
13702 else
13703 {
13704 /* T3 encoding. */
13705 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13706 {
13707 /* Handle STRB (register). */
13708 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13709 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13710 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13711 offset_addr = u_regval[1] << shift_imm;
13712 address = u_regval[0] + offset_addr;
13713 }
13714 else
13715 {
13716 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13717 if (bit (thumb2_insn_r->arm_insn, 10))
13718 {
13719 if (bit (thumb2_insn_r->arm_insn, 9))
13720 offset_addr = u_regval[0] + offset_imm;
13721 else
13722 offset_addr = u_regval[0] - offset_imm;
13723
13724 address = offset_addr;
13725 }
13726 else
13727 address = u_regval[0];
13728 }
13729 }
13730
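/* The low two bits of OP1 give the access size of the store: 0 and 4 are
byte stores, 1 and 5 halfword stores, 2 and 6 word stores. */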
13731 switch (op1)
13732 {
13733 /* Store byte instructions. */
13734 case 4:
13735 case 0:
13736 record_buf_mem[0] = 1;
13737 break;
13738 /* Store half word instructions. */
13739 case 1:
13740 case 5:
13741 record_buf_mem[0] = 2;
13742 break;
13743 /* Store word instructions. */
13744 case 2:
13745 case 6:
13746 record_buf_mem[0] = 4;
13747 break;
13748
13749 default:
13750 gdb_assert_not_reached ("no decoding pattern found");
13751 break;
13752 }
13753
13754 record_buf_mem[1] = address;
13755 thumb2_insn_r->mem_rec_count = 1;
13756 record_buf[0] = reg_rn;
13757 thumb2_insn_r->reg_rec_count = 1;
13758
13759 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13760 record_buf);
13761 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13762 record_buf_mem);
13763 return ARM_RECORD_SUCCESS;
13764 }
13765
13766 /* Handler for thumb2 load memory hints instructions. */
13767
13768 static int
13769 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
13770 {
13771 uint32_t record_buf[8];
13772 uint32_t reg_rt, reg_rn;
13773
13774 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13775 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13776
13777 if (ARM_PC_REGNUM != reg_rt)
13778 {
13779 record_buf[0] = reg_rt;
13780 record_buf[1] = reg_rn;
13781 record_buf[2] = ARM_PS_REGNUM;
13782 thumb2_insn_r->reg_rec_count = 3;
13783
13784 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13785 record_buf);
13786 return ARM_RECORD_SUCCESS;
13787 }
13788
13789 return ARM_RECORD_FAILURE;
13790 }
13791
13792 /* Handler for thumb2 load word instructions. */
13793
13794 static int
13795 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
13796 {
13797 uint32_t record_buf[8];
13798
13799 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13800 record_buf[1] = ARM_PS_REGNUM;
13801 thumb2_insn_r->reg_rec_count = 2;
13802
13803 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13804 record_buf);
13805 return ARM_RECORD_SUCCESS;
13806 }
13807
13808 /* Handler for thumb2 long multiply, long multiply accumulate, and
13809 divide instructions. */
13810
13811 static int
13812 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
13813 {
13814 uint32_t opcode1 = 0, opcode2 = 0;
13815 uint32_t record_buf[8];
13816
13817 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13818 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13819
13820 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13821 {
13822 /* Handle SMULL, UMULL, SMLAL, UMLAL and the other long multiply
13823 accumulate variants decoded here. */
13824 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13825 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13826 record_buf[2] = ARM_PS_REGNUM;
13827 thumb2_insn_r->reg_rec_count = 3;
13828 }
13829 else if (1 == opcode1 || 3 == opcode1)
13830 {
13831 /* Handle SDIV and UDIV. */
13832 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13833 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13834 record_buf[2] = ARM_PS_REGNUM;
13835 thumb2_insn_r->reg_rec_count = 3;
13836 }
13837 else
13838 return ARM_RECORD_FAILURE;
13839
13840 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13841 record_buf);
13842 return ARM_RECORD_SUCCESS;
13843 }
13844
13845 /* Record handler for thumb32 coprocessor instructions. */
13846
13847 static int
13848 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
13849 {
13850 if (bit (thumb2_insn_r->arm_insn, 25))
13851 return arm_record_coproc_data_proc (thumb2_insn_r);
13852 else
13853 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13854 }
13855
13856 /* Record handler for Advanced SIMD structure load/store instructions. */
13857
13858 static int
13859 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
13860 {
13861 struct regcache *reg_cache = thumb2_insn_r->regcache;
13862 uint32_t l_bit, a_bit, b_bits;
13863 uint32_t record_buf[128], record_buf_mem[128];
13864 uint32_t reg_rn, reg_vd, address, f_elem;
13865 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13866 uint8_t f_ebytes;
13867
13868 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13869 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13870 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13871 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13872 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13873 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13874 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13875 f_elem = 8 / f_ebytes;
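/* F_EBYTES is the element size in bytes taken from the size field in bits
6-7; F_ELEM is the number of such elements held in one 64-bit D
register. */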
13876
13877 if (!l_bit)
13878 {
13879 ULONGEST u_regval = 0;
13880 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13881 address = u_regval;
13882
13883 if (!a_bit)
13884 {
13885 /* Handle VST1. */
13886 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13887 {
13888 if (b_bits == 0x07)
13889 bf_regs = 1;
13890 else if (b_bits == 0x0a)
13891 bf_regs = 2;
13892 else if (b_bits == 0x06)
13893 bf_regs = 3;
13894 else if (b_bits == 0x02)
13895 bf_regs = 4;
13896 else
13897 bf_regs = 0;
13898
13899 for (index_r = 0; index_r < bf_regs; index_r++)
13900 {
13901 for (index_e = 0; index_e < f_elem; index_e++)
13902 {
13903 record_buf_mem[index_m++] = f_ebytes;
13904 record_buf_mem[index_m++] = address;
13905 address = address + f_ebytes;
13906 thumb2_insn_r->mem_rec_count += 1;
13907 }
13908 }
13909 }
13910 /* Handle VST2. */
13911 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13912 {
13913 if (b_bits == 0x09 || b_bits == 0x08)
13914 bf_regs = 1;
13915 else if (b_bits == 0x03)
13916 bf_regs = 2;
13917 else
13918 bf_regs = 0;
13919
13920 for (index_r = 0; index_r < bf_regs; index_r++)
13921 for (index_e = 0; index_e < f_elem; index_e++)
13922 {
13923 for (loop_t = 0; loop_t < 2; loop_t++)
13924 {
13925 record_buf_mem[index_m++] = f_ebytes;
13926 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13927 thumb2_insn_r->mem_rec_count += 1;
13928 }
13929 address = address + (2 * f_ebytes);
13930 }
13931 }
13932 /* Handle VST3. */
13933 else if ((b_bits & 0x0e) == 0x04)
13934 {
13935 for (index_e = 0; index_e < f_elem; index_e++)
13936 {
13937 for (loop_t = 0; loop_t < 3; loop_t++)
13938 {
13939 record_buf_mem[index_m++] = f_ebytes;
13940 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13941 thumb2_insn_r->mem_rec_count += 1;
13942 }
13943 address = address + (3 * f_ebytes);
13944 }
13945 }
13946 /* Handle VST4. */
13947 else if (!(b_bits & 0x0e))
13948 {
13949 for (index_e = 0; index_e < f_elem; index_e++)
13950 {
13951 for (loop_t = 0; loop_t < 4; loop_t++)
13952 {
13953 record_buf_mem[index_m++] = f_ebytes;
13954 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13955 thumb2_insn_r->mem_rec_count += 1;
13956 }
13957 address = address + (4 * f_ebytes);
13958 }
13959 }
13960 }
13961 else
13962 {
13963 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13964
13965 if (bft_size == 0x00)
13966 f_ebytes = 1;
13967 else if (bft_size == 0x01)
13968 f_ebytes = 2;
13969 else if (bft_size == 0x02)
13970 f_ebytes = 4;
13971 else
13972 f_ebytes = 0;
13973
13974 /* Handle VST1. */
13975 if (!(b_bits & 0x0b) || b_bits == 0x08)
13976 thumb2_insn_r->mem_rec_count = 1;
13977 /* Handle VST2. */
13978 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13979 thumb2_insn_r->mem_rec_count = 2;
13980 /* Handle VST3. */
13981 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13982 thumb2_insn_r->mem_rec_count = 3;
13983 /* Handle VST4. */
13984 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13985 thumb2_insn_r->mem_rec_count = 4;
13986
13987 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13988 {
13989 record_buf_mem[index_m * 2] = f_ebytes;
13990 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
13991 }
13992 }
13993 }
13994 else
13995 {
13996 if (!a_bit)
13997 {
13998 /* Handle VLD1. */
13999 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14000 thumb2_insn_r->reg_rec_count = 1;
14001 /* Handle VLD2. */
14002 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14003 thumb2_insn_r->reg_rec_count = 2;
14004 /* Handle VLD3. */
14005 else if ((b_bits & 0x0e) == 0x04)
14006 thumb2_insn_r->reg_rec_count = 3;
14007 /* Handle VLD4. */
14008 else if (!(b_bits & 0x0e))
14009 thumb2_insn_r->reg_rec_count = 4;
14010 }
14011 else
14012 {
14013 /* Handle VLD1. */
14014 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14015 thumb2_insn_r->reg_rec_count = 1;
14016 /* Handle VLD2. */
14017 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14018 thumb2_insn_r->reg_rec_count = 2;
14019 /* Handle VLD3. */
14020 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14021 thumb2_insn_r->reg_rec_count = 3;
14022 /* Handle VLD4. */
14023 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14024 thumb2_insn_r->reg_rec_count = 4;
14025
14026 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14027 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14028 }
14029 }
14030
14031 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14032 {
14033 record_buf[index_r] = reg_rn;
14034 thumb2_insn_r->reg_rec_count += 1;
14035 }
14036
14037 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14038 record_buf);
14039 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14040 record_buf_mem);
14041 return 0;
14042 }
14043
14044 /* Decodes thumb2 instruction type and invokes its record handler. */
14045
14046 static unsigned int
14047 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14048 {
14049 uint32_t op, op1, op2;
14050
14051 op = bit (thumb2_insn_r->arm_insn, 15);
14052 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14053 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
14054
14055 if (op1 == 0x01)
14056 {
14057 if (!(op2 & 0x64))
14058 {
14059 /* Load/store multiple instruction. */
14060 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14061 }
14062 else if ((op2 & 0x64) == 0x4)
14063 {
14064 /* Load/store (dual/exclusive) and table branch instruction. */
14065 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14066 }
14067 else if ((op2 & 0x60) == 0x20)
14068 {
14069 /* Data-processing (shifted register). */
14070 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14071 }
14072 else if (op2 & 0x40)
14073 {
14074 /* Co-processor instructions. */
14075 return thumb2_record_coproc_insn (thumb2_insn_r);
14076 }
14077 }
14078 else if (op1 == 0x02)
14079 {
14080 if (op)
14081 {
14082 /* Branches and miscellaneous control instructions. */
14083 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14084 }
14085 else if (op2 & 0x20)
14086 {
14087 /* Data-processing (plain binary immediate) instruction. */
14088 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14089 }
14090 else
14091 {
14092 /* Data-processing (modified immediate). */
14093 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14094 }
14095 }
14096 else if (op1 == 0x03)
14097 {
14098 if (!(op2 & 0x71))
14099 {
14100 /* Store single data item. */
14101 return thumb2_record_str_single_data (thumb2_insn_r);
14102 }
14103 else if (!((op2 & 0x71) ^ 0x10))
14104 {
14105 /* Advanced SIMD or structure load/store instructions. */
14106 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14107 }
14108 else if (!((op2 & 0x67) ^ 0x01))
14109 {
14110 /* Load byte, memory hints instruction. */
14111 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14112 }
14113 else if (!((op2 & 0x67) ^ 0x03))
14114 {
14115 /* Load halfword, memory hints instruction. */
14116 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14117 }
14118 else if (!((op2 & 0x67) ^ 0x05))
14119 {
14120 /* Load word instruction. */
14121 return thumb2_record_ld_word (thumb2_insn_r);
14122 }
14123 else if (!((op2 & 0x70) ^ 0x20))
14124 {
14125 /* Data-processing (register) instruction. */
14126 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14127 }
14128 else if (!((op2 & 0x78) ^ 0x30))
14129 {
14130 /* Multiply, multiply accumulate, abs diff instruction. */
14131 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14132 }
14133 else if (!((op2 & 0x78) ^ 0x38))
14134 {
14135 /* Long multiply, long multiply accumulate, and divide. */
14136 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14137 }
14138 else if (op2 & 0x40)
14139 {
14140 /* Co-processor instructions. */
14141 return thumb2_record_coproc_insn (thumb2_insn_r);
14142 }
14143 }
14144
14145 return -1;
14146 }
14147
14148 namespace {
14149 /* Abstract memory reader. */
14150
14151 class abstract_memory_reader
14152 {
14153 public:
14154 /* Read LEN bytes of target memory at address MEMADDR, placing the
14155 results in GDB's memory at BUF. Return true on success. */
14156
14157 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
14158 };
14159
14160 /* Instruction reader from real target. */
14161
14162 class instruction_reader : public abstract_memory_reader
14163 {
14164 public:
14165 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
14166 {
14167 if (target_read_memory (memaddr, buf, len))
14168 return false;
14169 else
14170 return true;
14171 }
14172 };
14173
14174 } // namespace
14175
14176 /* Extract an arm/thumb/thumb2 instruction of INSN_SIZE bytes. Return 0 on
14177 success and a positive value on failure. */
14178
14179 static int
14180 extract_arm_insn (abstract_memory_reader& reader,
14181 arm_insn_decode_record *insn_record, uint32_t insn_size)
14182 {
14183 gdb_byte buf[insn_size];
14184
14185 memset (&buf[0], 0, insn_size);
14186
14187 if (!reader.read (insn_record->this_addr, buf, insn_size))
14188 return 1;
14189 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
14190 insn_size,
14191 gdbarch_byte_order_for_code (insn_record->gdbarch));
14192 return 0;
14193 }
14194
14195 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14196
14197 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and
14198 dispatch it to the matching record handler. */
14199
14200 static int
14201 decode_insn (abstract_memory_reader &reader,
14202 arm_insn_decode_record *arm_record,
14203 record_type_t record_type, uint32_t insn_size)
14204 {
14205
14206 /* (Starting from numerical 0.) Bits 25, 26, 27 decode the type of arm
14207 instruction. */
14208 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14209 {
14210 arm_record_data_proc_misc_ld_str, /* 000. */
14211 arm_record_data_proc_imm, /* 001. */
14212 arm_record_ld_st_imm_offset, /* 010. */
14213 arm_record_ld_st_reg_offset, /* 011. */
14214 arm_record_ld_st_multiple, /* 100. */
14215 arm_record_b_bl, /* 101. */
14216 arm_record_asimd_vfp_coproc, /* 110. */
14217 arm_record_coproc_data_proc /* 111. */
14218 };
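/* For example, an ARM "push {r4, lr}" (stmdb sp!, {r4, lr}, encoded as
0xe92d4010) has bits 25-27 equal to 100 and is therefore dispatched to
arm_record_ld_st_multiple above. */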
14219
14220 /* (Starting from numerical 0.) Bits 13, 14, 15 decode the type of thumb
14221 instruction. */
14222 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14223 {
14224 thumb_record_shift_add_sub, /* 000. */
14225 thumb_record_add_sub_cmp_mov, /* 001. */
14226 thumb_record_ld_st_reg_offset, /* 010. */
14227 thumb_record_ld_st_imm_offset, /* 011. */
14228 thumb_record_ld_st_stack, /* 100. */
14229 thumb_record_misc, /* 101. */
14230 thumb_record_ldm_stm_swi, /* 110. */
14231 thumb_record_branch /* 111. */
14232 };
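/* Similarly, a 16-bit Thumb "movs r0, #1" (encoded as 0x2001) has bits
13-15 equal to 001 and is dispatched to thumb_record_add_sub_cmp_mov. */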
14233
14234 uint32_t ret = 0; /* Return value: negative: failure, 0: success. */
14235 uint32_t insn_id = 0;
14236
14237 if (extract_arm_insn (reader, arm_record, insn_size))
14238 {
14239 if (record_debug)
14240 {
14241 gdb_printf (gdb_stdlog,
14242 _("Process record: error reading memory at "
14243 "addr %s len = %d.\n"),
14244 paddress (arm_record->gdbarch,
14245 arm_record->this_addr), insn_size);
14246 }
14247 return -1;
14248 }
14249 else if (ARM_RECORD == record_type)
14250 {
14251 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14252 insn_id = bits (arm_record->arm_insn, 25, 27);
14253
14254 if (arm_record->cond == 0xf)
14255 ret = arm_record_extension_space (arm_record);
14256 else
14257 {
14258 /* This insn has not fallen into the extension space, so decode
14259 it with the ordinary opcode table. */
14260 ret = arm_handle_insn[insn_id] (arm_record);
14261 }
14262 if (ret != ARM_RECORD_SUCCESS)
14263 {
14264 arm_record_unsupported_insn (arm_record);
14265 ret = -1;
14266 }
14267 }
14268 else if (THUMB_RECORD == record_type)
14269 {
14270 /* As thumb does not have condition codes, set cond to -1. */
14271 arm_record->cond = -1;
14272 insn_id = bits (arm_record->arm_insn, 13, 15);
14273 ret = thumb_handle_insn[insn_id] (arm_record);
14274 if (ret != ARM_RECORD_SUCCESS)
14275 {
14276 arm_record_unsupported_insn (arm_record);
14277 ret = -1;
14278 }
14279 }
14280 else if (THUMB2_RECORD == record_type)
14281 {
14282 /* As thumb does not have condition codes, set cond to -1. */
14283 arm_record->cond = -1;
14284
14285 /* Swap the two halfwords of the 32-bit thumb instruction so that the first halfword ends up in the upper 16 bits, as the thumb2 record handlers expect. */
14286 arm_record->arm_insn
14287 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
14288
14289 ret = thumb2_record_decode_insn_handler (arm_record);
14290
14291 if (ret != ARM_RECORD_SUCCESS)
14292 {
14293 arm_record_unsupported_insn (arm_record);
14294 ret = -1;
14295 }
14296 }
14297 else
14298 {
14299 /* Throw assertion. */
14300 gdb_assert_not_reached ("not a valid instruction, could not decode");
14301 }
14302
14303 return ret;
14304 }
14305
14306 #if GDB_SELF_TEST
14307 namespace selftests {
14308
14309 /* Provide both 16-bit and 32-bit thumb instructions. */
14310
14311 class instruction_reader_thumb : public abstract_memory_reader
14312 {
14313 public:
14314 template<size_t SIZE>
14315 instruction_reader_thumb (enum bfd_endian endian,
14316 const uint16_t (&insns)[SIZE])
14317 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
14318 {}
14319
14320 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
14321 {
14322 SELF_CHECK (len == 4 || len == 2);
14323 SELF_CHECK (memaddr % 2 == 0);
14324 SELF_CHECK ((memaddr / 2) < m_insns_size);
14325
14326 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
14327 if (len == 4)
14328 {
14329 store_unsigned_integer (&buf[2], 2, m_endian,
14330 m_insns[memaddr / 2 + 1]);
14331 }
14332 return true;
14333 }
14334
14335 private:
14336 enum bfd_endian m_endian;
14337 const uint16_t *m_insns;
14338 size_t m_insns_size;
14339 };
14340
14341 static void
14342 arm_record_test (void)
14343 {
14344 struct gdbarch_info info;
14345 info.bfd_arch_info = bfd_scan_arch ("arm");
14346
14347 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14348
14349 SELF_CHECK (gdbarch != NULL);
14350
14351 /* 16-bit Thumb instructions. */
14352 {
14353 arm_insn_decode_record arm_record;
14354
14355 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14356 arm_record.gdbarch = gdbarch;
14357
14358 static const uint16_t insns[] = {
14359 /* db b2 uxtb r3, r3 */
14360 0xb2db,
14361 /* cd 58 ldr r5, [r1, r3] */
14362 0x58cd,
14363 };
14364
14365 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
14366 instruction_reader_thumb reader (endian, insns);
14367 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14368 THUMB_INSN_SIZE_BYTES);
14369
14370 SELF_CHECK (ret == 0);
14371 SELF_CHECK (arm_record.mem_rec_count == 0);
14372 SELF_CHECK (arm_record.reg_rec_count == 1);
14373 SELF_CHECK (arm_record.arm_regs[0] == 3);
14374
14375 arm_record.this_addr += 2;
14376 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14377 THUMB_INSN_SIZE_BYTES);
14378
14379 SELF_CHECK (ret == 0);
14380 SELF_CHECK (arm_record.mem_rec_count == 0);
14381 SELF_CHECK (arm_record.reg_rec_count == 1);
14382 SELF_CHECK (arm_record.arm_regs[0] == 5);
14383 }
14384
14385 /* 32-bit Thumb-2 instructions. */
14386 {
14387 arm_insn_decode_record arm_record;
14388
14389 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14390 arm_record.gdbarch = gdbarch;
14391
14392 static const uint16_t insns[] = {
14393 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
14394 0xee1d, 0x7f70,
14395 };
14396
14397 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
14398 instruction_reader_thumb reader (endian, insns);
14399 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14400 THUMB2_INSN_SIZE_BYTES);
14401
14402 SELF_CHECK (ret == 0);
14403 SELF_CHECK (arm_record.mem_rec_count == 0);
14404 SELF_CHECK (arm_record.reg_rec_count == 1);
14405 SELF_CHECK (arm_record.arm_regs[0] == 7);
14406 }
14407 }
14408
14409 /* Instruction reader from manually cooked instruction sequences. */
14410
14411 class test_arm_instruction_reader : public arm_instruction_reader
14412 {
14413 public:
14414 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14415 : m_insns (insns)
14416 {}
14417
14418 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14419 {
14420 SELF_CHECK (memaddr % 4 == 0);
14421 SELF_CHECK (memaddr / 4 < m_insns.size ());
14422
14423 return m_insns[memaddr / 4];
14424 }
14425
14426 private:
14427 const gdb::array_view<const uint32_t> m_insns;
14428 };
14429
14430 static void
14431 arm_analyze_prologue_test ()
14432 {
14433 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14434 {
14435 struct gdbarch_info info;
14436 info.byte_order = endianness;
14437 info.byte_order_for_code = endianness;
14438 info.bfd_arch_info = bfd_scan_arch ("arm");
14439
14440 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14441
14442 SELF_CHECK (gdbarch != NULL);
14443
14444 /* The "sub" instruction contains an immediate value rotate count of 0,
14445 which resulted in shifting a 32-bit value by 32 bits, undefined
14446 behaviour that was caught by UBSan.  */
14447 const uint32_t insns[] = {
14448 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14449 0xe1a05000, /* mov r5, r0 */
14450 0xe5903020, /* ldr r3, [r0, #32] */
14451 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14452 };
14453
14454 test_arm_instruction_reader mem_reader (insns);
14455 arm_prologue_cache cache;
14456 arm_cache_init (&cache, gdbarch);
14457
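/* The test passes as long as analyzing this prologue completes cleanly;
no particular cache contents are checked.  */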
14458 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14459 }
14460 }
14461
14462 } // namespace selftests
14463 #endif /* GDB_SELF_TEST */
14464
14465 /* Free the register and memory change lists allocated while recording an instruction.  */
14466
14467 static void
14468 deallocate_reg_mem (arm_insn_decode_record *record)
14469 {
14470 xfree (record->arm_regs);
14471 xfree (record->arm_mems);
14472 }
14473
14474
14475 /* Parse the current instruction and record, in the record_arch_list, the
14476 registers and memory locations that the instruction will change.
14477 Return -1 if something goes wrong, 0 otherwise.  */
14478
14479 int
14480 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14481 CORE_ADDR insn_addr)
14482 {
14483
14484 uint32_t no_of_rec = 0;
14485 int ret = 0; /* Return value: -1 on failure, 0 on success.  */
14486 ULONGEST t_bit = 0, insn_id = 0;
14487
14488 ULONGEST u_regval = 0;
14489
14490 arm_insn_decode_record arm_record;
14491
14492 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14493 arm_record.regcache = regcache;
14494 arm_record.this_addr = insn_addr;
14495 arm_record.gdbarch = gdbarch;
14496
14497
14498 if (record_debug > 1)
14499 {
14500 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14501 "addr = %s\n",
14502 paddress (gdbarch, arm_record.this_addr));
14503 }
14504
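/* Read the first halfword of the instruction; for Thumb code this is enough
to tell below whether it is a 16-bit or a 32-bit (Thumb-2) encoding.  */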
14505 instruction_reader reader;
14506 if (extract_arm_insn (reader, &arm_record, 2))
14507 {
14508 if (record_debug)
14509 {
14510 gdb_printf (gdb_stdlog,
14511 _("Process record: error reading memory at "
14512 "addr %s len = %d.\n"),
14513 paddress (arm_record.gdbarch,
14514 arm_record.this_addr), 2);
14515 }
14516 return -1;
14517 }
14518
14519 /* Check whether the instruction is a Thumb or an ARM one.  */
14520
14521 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14522 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14523
14524
14525 if (!(u_regval & t_bit))
14526 {
14527 /* We are decoding an ARM instruction.  */
14528 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14529 }
14530 else
14531 {
14532 insn_id = bits (arm_record.arm_insn, 11, 15);
14533 /* Is it a 32-bit Thumb-2 instruction?  */
14534 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14535 {
14536 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14537 THUMB2_INSN_SIZE_BYTES);
14538 }
14539 else
14540 {
14541 /* We are decoding a 16-bit Thumb instruction.  */
14542 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14543 THUMB_INSN_SIZE_BYTES);
14544 }
14545 }
14546
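/* If decoding succeeded, add the collected register and memory changes to
the record-full architecture list.  */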
14547 if (0 == ret)
14548 {
14549 /* Record registers. */
14550 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14551 if (arm_record.arm_regs)
14552 {
14553 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14554 {
14555 if (record_full_arch_list_add_reg
14556 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14557 ret = -1;
14558 }
14559 }
14560 /* Record memory locations.  */
14561 if (arm_record.arm_mems)
14562 {
14563 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14564 {
14565 if (record_full_arch_list_add_mem
14566 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14567 arm_record.arm_mems[no_of_rec].len))
14568 ret = -1;
14569 }
14570 }
14571
14572 if (record_full_arch_list_add_end ())
14573 ret = -1;
14574 }
14575
14576
14577 deallocate_reg_mem (&arm_record);
14578
14579 return ret;
14580 }
14581
14582 /* See arm-tdep.h. */
14583
14584 const target_desc *
14585 arm_read_description (arm_fp_type fp_type, bool tls)
14586 {
14587 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14588
14589 if (tdesc == nullptr)
14590 {
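/* First request for this FP type / TLS combination: create the target
description and cache it for reuse.  */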
14591 tdesc = arm_create_target_description (fp_type, tls);
14592 tdesc_arm_list[fp_type][tls] = tdesc;
14593 }
14594
14595 return tdesc;
14596 }
14597
14598 /* See arm-tdep.h. */
14599
14600 const target_desc *
14601 arm_read_mprofile_description (arm_m_profile_type m_type)
14602 {
14603 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14604
14605 if (tdesc == nullptr)
14606 {
14607 tdesc = arm_create_mprofile_target_description (m_type);
14608 tdesc_arm_mprofile_list[m_type] = tdesc;
14609 }
14610
14611 return tdesc;
14612 }