1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2.h"
42 #include "dwarf2/frame.h"
43 #include "gdbtypes.h"
44 #include "prologue-value.h"
45 #include "remote.h"
46 #include "target-descriptions.h"
47 #include "user-regs.h"
48 #include "observable.h"
49 #include "count-one-bits.h"
50
51 #include "arch/arm.h"
52 #include "arch/arm-get-next-pcs.h"
53 #include "arm-tdep.h"
54 #include "gdb/sim-arm.h"
55
56 #include "elf-bfd.h"
57 #include "coff/internal.h"
58 #include "elf/arm.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "producer.h"
65
66 #if GDB_SELF_TEST
67 #include "gdbsupport/selftest.h"
68 #endif
69
70 static bool arm_debug;
71
72 /* Print an "arm" debug statement. */
73
74 #define arm_debug_printf(fmt, ...) \
75 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
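
/* Illustrative usage (not part of the original source): with ARM debugging
   enabled (presumably via the corresponding "set debug arm on" command
   registered elsewhere in this file), a call such as

     arm_debug_printf ("Prologue scan stopped at %s",
                       paddress (gdbarch, start));

   as used later in this file prints a line prefixed with "arm" only when
   arm_debug is true.  */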
76
77 /* Macros for setting and testing a bit in a minimal symbol that marks
78 it as a Thumb function. The MSB of the minimal symbol's "info" field
79 is used for this purpose.
80
81 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
82 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
83
84 #define MSYMBOL_SET_SPECIAL(msym) \
85 (msym)->set_target_flag_1 (true)
86
87 #define MSYMBOL_IS_SPECIAL(msym) \
88 (msym)->target_flag_1 ()
89
90 struct arm_mapping_symbol
91 {
92 CORE_ADDR value;
93 char type;
94
95 bool operator< (const arm_mapping_symbol &other) const
96 { return this->value < other.value; }
97 };
98
99 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
100
101 struct arm_per_bfd
102 {
103 explicit arm_per_bfd (size_t num_sections)
104 : section_maps (new arm_mapping_symbol_vec[num_sections]),
105 section_maps_sorted (new bool[num_sections] ())
106 {}
107
108 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
109
110 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111
112 The format is an array of vectors of arm_mapping_symbol: there is one
113 vector for each section of the objfile (the array is indexed by BFD
114 section index).
115
116 For each section, the vector of arm_mapping_symbol is sorted by
117 symbol value (address). */
118 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
119
120 /* For each corresponding element of section_maps above, true if that
121 vector has been sorted. */
122 std::unique_ptr<bool[]> section_maps_sorted;
123 };
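
/* Worked example (illustrative, not part of the original source): for a
   section whose mapping symbols are

     $t at offset 0x00   (Thumb code)
     $d at offset 0x40   (literal pool data)
     $t at offset 0x48   (Thumb code again)

   the corresponding arm_mapping_symbol_vec, once sorted by value, lets a
   lookup for offset 0x44 land on the preceding $d entry and classify that
   address as data rather than code.  */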
124
125 /* Per-bfd data used for mapping symbols. */
126 static bfd_key<arm_per_bfd> arm_bfd_data_key;
127
128 /* The list of available "set arm ..." and "show arm ..." commands. */
129 static struct cmd_list_element *setarmcmdlist = NULL;
130 static struct cmd_list_element *showarmcmdlist = NULL;
131
132 /* The type of floating-point to use. Keep this in sync with enum
133 arm_float_model, and the help string in _initialize_arm_tdep. */
134 static const char *const fp_model_strings[] =
135 {
136 "auto",
137 "softfpa",
138 "fpa",
139 "softvfp",
140 "vfp",
141 NULL
142 };
143
144 /* A variable that can be configured by the user. */
145 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
146 static const char *current_fp_model = "auto";
147
148 /* The ABI to use. Keep this in sync with arm_abi_kind. */
149 static const char *const arm_abi_strings[] =
150 {
151 "auto",
152 "APCS",
153 "AAPCS",
154 NULL
155 };
156
157 /* A variable that can be configured by the user. */
158 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
159 static const char *arm_abi_string = "auto";
160
161 /* The execution mode to assume. */
162 static const char *const arm_mode_strings[] =
163 {
164 "auto",
165 "arm",
166 "thumb",
167 NULL
168 };
169
170 static const char *arm_fallback_mode_string = "auto";
171 static const char *arm_force_mode_string = "auto";
172
173 /* The standard register names, and all the valid aliases for them. Note
174 that `fp', `sp' and `pc' are not added in this alias list, because they
175 have been added as builtin user registers in
176 std-regs.c:_initialize_frame_reg. */
177 static const struct
178 {
179 const char *name;
180 int regnum;
181 } arm_register_aliases[] = {
182 /* Basic register numbers. */
183 { "r0", 0 },
184 { "r1", 1 },
185 { "r2", 2 },
186 { "r3", 3 },
187 { "r4", 4 },
188 { "r5", 5 },
189 { "r6", 6 },
190 { "r7", 7 },
191 { "r8", 8 },
192 { "r9", 9 },
193 { "r10", 10 },
194 { "r11", 11 },
195 { "r12", 12 },
196 { "r13", 13 },
197 { "r14", 14 },
198 { "r15", 15 },
199 /* Synonyms (argument and variable registers). */
200 { "a1", 0 },
201 { "a2", 1 },
202 { "a3", 2 },
203 { "a4", 3 },
204 { "v1", 4 },
205 { "v2", 5 },
206 { "v3", 6 },
207 { "v4", 7 },
208 { "v5", 8 },
209 { "v6", 9 },
210 { "v7", 10 },
211 { "v8", 11 },
212 /* Other platform-specific names for r9. */
213 { "sb", 9 },
214 { "tr", 9 },
215 /* Special names. */
216 { "ip", 12 },
217 { "lr", 14 },
218 /* Names used by GCC (not listed in the ARM EABI). */
219 { "sl", 10 },
220 /* A special name from the older ATPCS. */
221 { "wr", 7 },
222 };
223
224 static const char *const arm_register_names[] =
225 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
226 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
227 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
228 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
229 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
230 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
231 "fps", "cpsr" }; /* 24 25 */
232
233 /* Holds the current set of options to be passed to the disassembler. */
234 static char *arm_disassembler_options;
235
236 /* Valid register name styles. */
237 static const char **valid_disassembly_styles;
238
239 /* Disassembly style to use. Default to "std" register names. */
240 static const char *disassembly_style;
241
242 /* All possible arm target descriptors. */
243 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
244 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
245
246 /* This is used to keep the bfd arch_info in sync with the disassembly
247 style. */
248 static void set_disassembly_style_sfunc (const char *, int,
249 struct cmd_list_element *);
250 static void show_disassembly_style_sfunc (struct ui_file *, int,
251 struct cmd_list_element *,
252 const char *);
253
254 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
255 readable_regcache *regcache,
256 int regnum, gdb_byte *buf);
257 static void arm_neon_quad_write (struct gdbarch *gdbarch,
258 struct regcache *regcache,
259 int regnum, const gdb_byte *buf);
260
261 static CORE_ADDR
262 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
263
264
265 /* get_next_pcs operations. */
266 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
267 arm_get_next_pcs_read_memory_unsigned_integer,
268 arm_get_next_pcs_syscall_next_pc,
269 arm_get_next_pcs_addr_bits_remove,
270 arm_get_next_pcs_is_thumb,
271 NULL,
272 };
273
274 struct arm_prologue_cache
275 {
276 /* The stack pointer at the time this frame was created; i.e. the
277 caller's stack pointer when this function was called. It is used
278 to identify this frame. */
279 CORE_ADDR sp;
280
281 /* Additional stack pointers used by M-profile with Security extension. */
282 /* Use msp_s / psp_s to hold the values of msp / psp when there is
283 no Security extension. */
284 CORE_ADDR msp_s;
285 CORE_ADDR msp_ns;
286 CORE_ADDR psp_s;
287 CORE_ADDR psp_ns;
288
289 /* Active stack pointer. */
290 int active_sp_regnum;
291 int active_msp_regnum;
292 int active_psp_regnum;
293
294 /* The frame base for this frame is just prev_sp - frame size.
295 FRAMESIZE is the distance from the frame pointer to the
296 initial stack pointer. */
297
298 int framesize;
299
300 /* The register used to hold the frame pointer for this frame. */
301 int framereg;
302
303 /* True if the return address is signed, false otherwise. */
304 gdb::optional<bool> ra_signed_state;
305
306 /* Saved register offsets. */
307 trad_frame_saved_reg *saved_regs;
308
309 arm_prologue_cache() = default;
310 };
311
312
313 /* Reconstruct T bit in program status register from LR value. */
314
315 static inline ULONGEST
316 reconstruct_t_bit (struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
317 {
318 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
319 if (IS_THUMB_ADDR (lr))
320 psr |= t_bit;
321 else
322 psr &= ~t_bit;
323
324 return psr;
325 }
326
327 /* Initialize stack pointers, and flag the active one. */
328
329 static inline void
330 arm_cache_init_sp (int regnum, CORE_ADDR* member,
331 struct arm_prologue_cache *cache,
332 struct frame_info *frame)
333 {
334 CORE_ADDR val = get_frame_register_unsigned (frame, regnum);
335 if (val == cache->sp)
336 cache->active_sp_regnum = regnum;
337
338 *member = val;
339 }
340
341 /* Initialize CACHE fields for which zero is not adequate (CACHE is
342 expected to have been ZALLOC'ed before calling this function). */
343
344 static void
345 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
346 {
347 cache->active_sp_regnum = ARM_SP_REGNUM;
348
349 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
350 }
351
352 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
353
354 static void
355 arm_cache_init (struct arm_prologue_cache *cache, struct frame_info *frame)
356 {
357 struct gdbarch *gdbarch = get_frame_arch (frame);
358 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
359
360 arm_cache_init (cache, gdbarch);
361 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
362
363 if (tdep->have_sec_ext)
364 {
365 CORE_ADDR msp_val = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
366 CORE_ADDR psp_val = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
367
368 arm_cache_init_sp (tdep->m_profile_msp_s_regnum, &cache->msp_s, cache, frame);
369 arm_cache_init_sp (tdep->m_profile_psp_s_regnum, &cache->psp_s, cache, frame);
370 arm_cache_init_sp (tdep->m_profile_msp_ns_regnum, &cache->msp_ns, cache, frame);
371 arm_cache_init_sp (tdep->m_profile_psp_ns_regnum, &cache->psp_ns, cache, frame);
372
373 if (msp_val == cache->msp_s)
374 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
375 else if (msp_val == cache->msp_ns)
376 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
377 if (psp_val == cache->psp_s)
378 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
379 else if (psp_val == cache->psp_ns)
380 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
381
382 /* Use MSP_S as default stack pointer. */
383 if (cache->active_sp_regnum == ARM_SP_REGNUM)
384 cache->active_sp_regnum = tdep->m_profile_msp_s_regnum;
385 }
386 else if (tdep->is_m)
387 {
388 arm_cache_init_sp (tdep->m_profile_msp_regnum, &cache->msp_s, cache, frame);
389 arm_cache_init_sp (tdep->m_profile_psp_regnum, &cache->psp_s, cache, frame);
390 }
391 else
392 arm_cache_init_sp (ARM_SP_REGNUM, &cache->msp_s, cache, frame);
393 }
394
395 /* Return the requested stack pointer value (in REGNUM), taking into
396 account whether we have a Security extension or an M-profile
397 CPU. */
398
399 static CORE_ADDR
400 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
401 arm_gdbarch_tdep *tdep, int regnum)
402 {
403 if (tdep->have_sec_ext)
404 {
405 if (regnum == tdep->m_profile_msp_s_regnum)
406 return cache->msp_s;
407 if (regnum == tdep->m_profile_msp_ns_regnum)
408 return cache->msp_ns;
409 if (regnum == tdep->m_profile_psp_s_regnum)
410 return cache->psp_s;
411 if (regnum == tdep->m_profile_psp_ns_regnum)
412 return cache->psp_ns;
413 if (regnum == tdep->m_profile_msp_regnum)
414 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
415 if (regnum == tdep->m_profile_psp_regnum)
416 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
417 if (regnum == ARM_SP_REGNUM)
418 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
419 }
420 else if (tdep->is_m)
421 {
422 if (regnum == tdep->m_profile_msp_regnum)
423 return cache->msp_s;
424 if (regnum == tdep->m_profile_psp_regnum)
425 return cache->psp_s;
426 if (regnum == ARM_SP_REGNUM)
427 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
428 }
429 else if (regnum == ARM_SP_REGNUM)
430 return cache->sp;
431
432 gdb_assert_not_reached ("Invalid SP selection");
433 }
434
435 /* Return the previous stack address, depending on which SP register
436 is active. */
437
438 static CORE_ADDR
439 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
440 {
441 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
442 return val;
443 }
444
445 /* Set the active stack pointer to VAL. */
446
447 static void
448 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
449 arm_gdbarch_tdep *tdep, CORE_ADDR val)
450 {
451 if (tdep->have_sec_ext)
452 {
453 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
454 cache->msp_s = val;
455 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
456 cache->msp_ns = val;
457 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
458 cache->psp_s = val;
459 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
460 cache->psp_ns = val;
461
462 return;
463 }
464 else if (tdep->is_m)
465 {
466 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
467 cache->msp_s = val;
468 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
469 cache->psp_s = val;
470
471 return;
472 }
473 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
474 {
475 cache->sp = val;
476 return;
477 }
478
479 gdb_assert_not_reached ("Invalid SP selection");
480 }
481
482 /* Return true if REGNUM is one of the stack pointers. */
483
484 static bool
485 arm_cache_is_sp_register (struct arm_prologue_cache *cache,
486 arm_gdbarch_tdep *tdep, int regnum)
487 {
488 if ((regnum == ARM_SP_REGNUM)
489 || (regnum == tdep->m_profile_msp_regnum)
490 || (regnum == tdep->m_profile_msp_s_regnum)
491 || (regnum == tdep->m_profile_msp_ns_regnum)
492 || (regnum == tdep->m_profile_psp_regnum)
493 || (regnum == tdep->m_profile_psp_s_regnum)
494 || (regnum == tdep->m_profile_psp_ns_regnum))
495 return true;
496 else
497 return false;
498 }
499
500 /* Set the active stack pointer to SP_REGNUM. */
501
502 static void
503 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
504 arm_gdbarch_tdep *tdep, int sp_regnum)
505 {
506 gdb_assert (sp_regnum != ARM_SP_REGNUM);
507 gdb_assert (arm_cache_is_sp_register (cache, tdep, sp_regnum));
508
509 if (tdep->have_sec_ext)
510 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
511 && sp_regnum != tdep->m_profile_psp_regnum);
512
513 cache->active_sp_regnum = sp_regnum;
514 }
515
516 namespace {
517
518 /* Abstract class to read ARM instructions from memory. */
519
520 class arm_instruction_reader
521 {
522 public:
523 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
524 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
525 };
526
527 /* Read instructions from target memory. */
528
529 class target_arm_instruction_reader : public arm_instruction_reader
530 {
531 public:
532 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
533 {
534 return read_code_unsigned_integer (memaddr, 4, byte_order);
535 }
536 };
537
538 } /* namespace */
539
540 static CORE_ADDR arm_analyze_prologue
541 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
542 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
543
544 /* Architecture version for displaced stepping. This affects the behaviour of
545 certain instructions, and really should not be hard-wired. */
546
547 #define DISPLACED_STEPPING_ARCH_VERSION 5
548
549 /* See arm-tdep.h. */
550
551 bool arm_apcs_32 = true;
552 bool arm_unwind_secure_frames = true;
553
554 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
555
556 int
557 arm_psr_thumb_bit (struct gdbarch *gdbarch)
558 {
559 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
560
561 if (tdep->is_m)
562 return XPSR_T;
563 else
564 return CPSR_T;
565 }
566
567 /* Determine if the processor is currently executing in Thumb mode. */
568
569 int
570 arm_is_thumb (struct regcache *regcache)
571 {
572 ULONGEST cpsr;
573 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
574
575 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
576
577 return (cpsr & t_bit) != 0;
578 }
579
580 /* Determine if FRAME is executing in Thumb mode. */
581
582 int
583 arm_frame_is_thumb (struct frame_info *frame)
584 {
585 CORE_ADDR cpsr;
586 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
587
588 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
589 directly (from a signal frame or dummy frame) or by interpreting
590 the saved LR (from a prologue or DWARF frame). So consult it and
591 trust the unwinders. */
592 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
593
594 return (cpsr & t_bit) != 0;
595 }
596
597 /* Search for the mapping symbol covering MEMADDR. If one is found,
598 return its type. Otherwise, return 0. If START is non-NULL,
599 set *START to the location of the mapping symbol. */
600
601 static char
602 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
603 {
604 struct obj_section *sec;
605
606 /* If there are mapping symbols, consult them. */
607 sec = find_pc_section (memaddr);
608 if (sec != NULL)
609 {
610 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
611 if (data != NULL)
612 {
613 unsigned int section_idx = sec->the_bfd_section->index;
614 arm_mapping_symbol_vec &map
615 = data->section_maps[section_idx];
616
617 /* Sort the vector on first use. */
618 if (!data->section_maps_sorted[section_idx])
619 {
620 std::sort (map.begin (), map.end ());
621 data->section_maps_sorted[section_idx] = true;
622 }
623
624 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
625 arm_mapping_symbol_vec::const_iterator it
626 = std::lower_bound (map.begin (), map.end (), map_key);
627
628 /* std::lower_bound finds the earliest ordered insertion
629 point. If the symbol at this position starts at this exact
630 address, we use that; otherwise, the preceding
631 mapping symbol covers this address. */
632 if (it < map.end ())
633 {
634 if (it->value == map_key.value)
635 {
636 if (start)
637 *start = it->value + sec->addr ();
638 return it->type;
639 }
640 }
641
642 if (it > map.begin ())
643 {
644 arm_mapping_symbol_vec::const_iterator prev_it
645 = it - 1;
646
647 if (start)
648 *start = prev_it->value + sec->addr ();
649 return prev_it->type;
650 }
651 }
652 }
653
654 return 0;
655 }
656
657 /* Determine if the program counter specified in MEMADDR is in a Thumb
658 function. This function should be called for addresses unrelated to
659 any executing frame; otherwise, prefer arm_frame_is_thumb. */
660
661 int
662 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
663 {
664 struct bound_minimal_symbol sym;
665 char type;
666 arm_displaced_step_copy_insn_closure *dsc = nullptr;
667 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
668
669 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
670 dsc = ((arm_displaced_step_copy_insn_closure * )
671 gdbarch_displaced_step_copy_insn_closure_by_addr
672 (gdbarch, current_inferior (), memaddr));
673
674 /* If checking the mode of displaced instruction in copy area, the mode
675 should be determined by instruction on the original address. */
676 if (dsc)
677 {
678 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
679 (unsigned long) dsc->insn_addr,
680 (unsigned long) memaddr);
681 memaddr = dsc->insn_addr;
682 }
683
684 /* If bit 0 of the address is set, assume this is a Thumb address. */
685 if (IS_THUMB_ADDR (memaddr))
686 return 1;
687
688 /* If the user wants to override the symbol table, let them. */
689 if (strcmp (arm_force_mode_string, "arm") == 0)
690 return 0;
691 if (strcmp (arm_force_mode_string, "thumb") == 0)
692 return 1;
693
694 /* ARM v6-M and v7-M are always in Thumb mode. */
695 if (tdep->is_m)
696 return 1;
697
698 /* If there are mapping symbols, consult them. */
699 type = arm_find_mapping_symbol (memaddr, NULL);
700 if (type)
701 return type == 't';
702
703 /* Thumb functions have a "special" bit set in minimal symbols. */
704 sym = lookup_minimal_symbol_by_pc (memaddr);
705 if (sym.minsym)
706 return (MSYMBOL_IS_SPECIAL (sym.minsym));
707
708 /* If the user wants to override the fallback mode, let them. */
709 if (strcmp (arm_fallback_mode_string, "arm") == 0)
710 return 0;
711 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
712 return 1;
713
714 /* If we couldn't find any symbol, but we're talking to a running
715 target, then trust the current value of $cpsr. This lets
716 "display/i $pc" always show the correct mode (though if there is
717 a symbol table we will not reach here, so it still may not be
718 displayed in the mode it will be executed). */
719 if (target_has_registers ())
720 return arm_frame_is_thumb (get_current_frame ());
721
722 /* Otherwise we're out of luck; we assume ARM. */
723 return 0;
724 }
725
726 /* Determine if the address specified equals any of these magic return
727 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
728 architectures.
729
730 From ARMv6-M Reference Manual B1.5.8
731 Table B1-5 Exception return behavior
732
733 EXC_RETURN Return To Return Stack
734 0xFFFFFFF1 Handler mode Main
735 0xFFFFFFF9 Thread mode Main
736 0xFFFFFFFD Thread mode Process
737
738 From ARMv7-M Reference Manual B1.5.8
739 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
740
741 EXC_RETURN Return To Return Stack
742 0xFFFFFFF1 Handler mode Main
743 0xFFFFFFF9 Thread mode Main
744 0xFFFFFFFD Thread mode Process
745
746 Table B1-9 EXC_RETURN definition of exception return behavior, with
747 FP
748
749 EXC_RETURN Return To Return Stack Frame Type
750 0xFFFFFFE1 Handler mode Main Extended
751 0xFFFFFFE9 Thread mode Main Extended
752 0xFFFFFFED Thread mode Process Extended
753 0xFFFFFFF1 Handler mode Main Basic
754 0xFFFFFFF9 Thread mode Main Basic
755 0xFFFFFFFD Thread mode Process Basic
756
757 For more details see "B1.5.8 Exception return behavior"
758 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
759
760 The ARMv8-M Architecture Technical Reference additionally defines the
761 following for implementations without the Security Extension:
762
763 EXC_RETURN Condition
764 0xFFFFFFB0 Return to Handler mode.
765 0xFFFFFFB8 Return to Thread mode using the main stack.
766 0xFFFFFFBC Return to Thread mode using the process stack. */
767
768 static int
769 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
770 {
771 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
772 if (tdep->have_sec_ext)
773 {
774 switch ((addr & 0xff000000))
775 {
776 case 0xff000000: /* EXC_RETURN pattern. */
777 case 0xfe000000: /* FNC_RETURN pattern. */
778 return 1;
779 default:
780 return 0;
781 }
782 }
783 else
784 {
785 switch (addr)
786 {
787 /* Values from ARMv8-M Architecture Technical Reference. */
788 case 0xffffffb0:
789 case 0xffffffb8:
790 case 0xffffffbc:
791 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
792 the exception return behavior. */
793 case 0xffffffe1:
794 case 0xffffffe9:
795 case 0xffffffed:
796 case 0xfffffff1:
797 case 0xfffffff9:
798 case 0xfffffffd:
799 /* Address is magic. */
800 return 1;
801
802 default:
803 /* Address is not magic. */
804 return 0;
805 }
806 }
807 }
808
809 /* Remove useless bits from addresses in a running program. */
810 static CORE_ADDR
811 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
812 {
813 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
814
815 /* On M-profile devices, do not strip the low bit from EXC_RETURN
816 (the magic exception return address). */
817 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
818 return val;
819
820 if (arm_apcs_32)
821 return UNMAKE_THUMB_ADDR (val);
822 else
823 return (val & 0x03fffffc);
824 }
825
826 /* Return 1 if PC is the start of a compiler helper function which
827 can be safely ignored during prologue skipping. IS_THUMB is true
828 if the function is known to be a Thumb function due to the way it
829 is being called. */
830 static int
831 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
832 {
833 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
834 struct bound_minimal_symbol msym;
835
836 msym = lookup_minimal_symbol_by_pc (pc);
837 if (msym.minsym != NULL
838 && msym.value_address () == pc
839 && msym.minsym->linkage_name () != NULL)
840 {
841 const char *name = msym.minsym->linkage_name ();
842
843 /* The GNU linker's Thumb call stub to foo is named
844 __foo_from_thumb. */
845 if (strstr (name, "_from_thumb") != NULL)
846 name += 2;
847
848 /* On soft-float targets, __truncdfsf2 is called to convert promoted
849 arguments to their argument types in non-prototyped
850 functions. */
851 if (startswith (name, "__truncdfsf2"))
852 return 1;
853 if (startswith (name, "__aeabi_d2f"))
854 return 1;
855
856 /* Internal functions related to thread-local storage. */
857 if (startswith (name, "__tls_get_addr"))
858 return 1;
859 if (startswith (name, "__aeabi_read_tp"))
860 return 1;
861 }
862 else
863 {
864 /* If we run against a stripped glibc, we may be unable to identify
865 special functions by name. Check for one important case,
866 __aeabi_read_tp, by comparing the *code* against the default
867 implementation (this is hand-written ARM assembler in glibc). */
868
869 if (!is_thumb
870 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
871 == 0xe3e00a0f /* mov r0, #0xffff0fff */
872 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
873 == 0xe240f01f) /* sub pc, r0, #31 */
874 return 1;
875 }
876
877 return 0;
878 }
879
880 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1
881 is the first 16 bits of the instruction, and INSN2 is the second 16 bits
882 of the instruction. */
883 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
884 ((bits ((insn1), 0, 3) << 12) \
885 | (bits ((insn1), 10, 10) << 11) \
886 | (bits ((insn2), 12, 14) << 8) \
887 | bits ((insn2), 0, 7))
888
889 /* Extract the immediate from a movw/movt instruction of encoding A. INSN
890 is the 32-bit instruction. */
891 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
892 ((bits ((insn), 16, 19) << 12) \
893 | bits ((insn), 0, 11))
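
/* Worked example (illustrative): for the A-encoding "movw r0, #0x1234" the
   instruction word carries imm4 = 0x1 in bits 16-19 and imm12 = 0x234 in
   bits 0-11, so EXTRACT_MOVW_MOVT_IMM_A reassembles
   (0x1 << 12) | 0x234 == 0x1234.  EXTRACT_MOVW_MOVT_IMM_T performs the same
   reassembly from the imm4:i:imm3:imm8 fields, which are split across the
   two 16-bit halfwords of the T encoding.  */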
894
895 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
896
897 static unsigned int
898 thumb_expand_immediate (unsigned int imm)
899 {
900 unsigned int count = imm >> 7;
901
902 if (count < 8)
903 switch (count / 2)
904 {
905 case 0:
906 return imm & 0xff;
907 case 1:
908 return (imm & 0xff) | ((imm & 0xff) << 16);
909 case 2:
910 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
911 case 3:
912 return (imm & 0xff) | ((imm & 0xff) << 8)
913 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
914 }
915
916 return (0x80 | (imm & 0x7f)) << (32 - count);
917 }
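
/* Worked examples (illustrative, not part of the original source):

     imm = 0x155: count = 2, so the low byte 0x55 is replicated into the
       "00XY00XY" pattern of ThumbExpandImm, giving 0x00550055.
     imm = 0x4ff: count = 9 >= 8, so the 8-bit value 0x80 | 0x7f = 0xff is
       rotated right by 9 bits, giving 0x7f800000.  */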
918
919 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
920 epilogue, 0 otherwise. */
921
922 static int
923 thumb_instruction_restores_sp (unsigned short insn)
924 {
925 return (insn == 0x46bd /* mov sp, r7 */
926 || (insn & 0xff80) == 0xb000 /* add sp, imm */
927 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
928 }
929
930 /* Analyze a Thumb prologue, looking for a recognizable stack frame
931 and frame pointer. Scan until we encounter a store that could
932 clobber the stack frame unexpectedly, or an unknown instruction.
933 Return the last address which is definitely safe to skip for an
934 initial breakpoint. */
935
936 static CORE_ADDR
937 thumb_analyze_prologue (struct gdbarch *gdbarch,
938 CORE_ADDR start, CORE_ADDR limit,
939 struct arm_prologue_cache *cache)
940 {
941 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
942 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
943 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
944 int i;
945 pv_t regs[16];
946 CORE_ADDR offset;
947 CORE_ADDR unrecognized_pc = 0;
948
949 for (i = 0; i < 16; i++)
950 regs[i] = pv_register (i, 0);
951 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
952
953 while (start < limit)
954 {
955 unsigned short insn;
956 gdb::optional<bool> ra_signed_state;
957
958 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
959
960 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
961 {
962 int regno;
963 int mask;
964
965 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
966 break;
967
968 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
969 whether to save LR (R14). */
970 mask = (insn & 0xff) | ((insn & 0x100) << 6);
971
972 /* Calculate offsets of saved R0-R7 and LR. */
973 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
974 if (mask & (1 << regno))
975 {
976 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
977 -4);
978 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
979 }
980 }
981 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
982 {
983 offset = (insn & 0x7f) << 2; /* get scaled offset */
984 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
985 -offset);
986 }
987 else if (thumb_instruction_restores_sp (insn))
988 {
989 /* Don't scan past the epilogue. */
990 break;
991 }
992 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
993 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
994 (insn & 0xff) << 2);
995 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
996 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
997 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
998 bits (insn, 6, 8));
999 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1000 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1001 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1002 bits (insn, 0, 7));
1003 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1004 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1005 && pv_is_constant (regs[bits (insn, 3, 5)]))
1006 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1007 regs[bits (insn, 6, 8)]);
1008 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1009 && pv_is_constant (regs[bits (insn, 3, 6)]))
1010 {
1011 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1012 int rm = bits (insn, 3, 6);
1013 regs[rd] = pv_add (regs[rd], regs[rm]);
1014 }
1015 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1016 {
1017 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1018 int src_reg = (insn & 0x78) >> 3;
1019 regs[dst_reg] = regs[src_reg];
1020 }
1021 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1022 {
1023 /* Handle stores to the stack. Normally pushes are used,
1024 but with GCC -mtpcs-frame, there may be other stores
1025 in the prologue to create the frame. */
1026 int regno = (insn >> 8) & 0x7;
1027 pv_t addr;
1028
1029 offset = (insn & 0xff) << 2;
1030 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1031
1032 if (stack.store_would_trash (addr))
1033 break;
1034
1035 stack.store (addr, 4, regs[regno]);
1036 }
1037 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1038 {
1039 int rd = bits (insn, 0, 2);
1040 int rn = bits (insn, 3, 5);
1041 pv_t addr;
1042
1043 offset = bits (insn, 6, 10) << 2;
1044 addr = pv_add_constant (regs[rn], offset);
1045
1046 if (stack.store_would_trash (addr))
1047 break;
1048
1049 stack.store (addr, 4, regs[rd]);
1050 }
1051 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1052 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1053 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1054 /* Ignore stores of argument registers to the stack. */
1055 ;
1056 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1057 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1058 /* Ignore block loads from the stack, potentially copying
1059 parameters from memory. */
1060 ;
1061 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1062 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1063 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1064 /* Similarly ignore single loads from the stack. */
1065 ;
1066 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1067 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1068 /* Skip register copies, i.e. saves to another register
1069 instead of the stack. */
1070 ;
1071 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1072 /* Recognize constant loads; even with small stacks these are necessary
1073 on Thumb. */
1074 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1075 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1076 {
1077 /* Constant pool loads, for the same reason. */
1078 unsigned int constant;
1079 CORE_ADDR loc;
1080
1081 loc = start + 4 + bits (insn, 0, 7) * 4;
1082 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1083 regs[bits (insn, 8, 10)] = pv_constant (constant);
1084 }
1085 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1086 {
1087 unsigned short inst2;
1088
1089 inst2 = read_code_unsigned_integer (start + 2, 2,
1090 byte_order_for_code);
1091 uint32_t whole_insn = (insn << 16) | inst2;
1092
1093 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1094 {
1095 /* BL, BLX. Allow some special function calls when
1096 skipping the prologue; GCC generates these before
1097 storing arguments to the stack. */
1098 CORE_ADDR nextpc;
1099 int j1, j2, imm1, imm2;
1100
1101 imm1 = sbits (insn, 0, 10);
1102 imm2 = bits (inst2, 0, 10);
1103 j1 = bit (inst2, 13);
1104 j2 = bit (inst2, 11);
1105
1106 offset = ((imm1 << 12) + (imm2 << 1));
1107 offset ^= ((!j2) << 22) | ((!j1) << 23);
1108
1109 nextpc = start + 4 + offset;
1110 /* For BLX make sure to clear the low bits. */
1111 if (bit (inst2, 12) == 0)
1112 nextpc = nextpc & 0xfffffffc;
1113
1114 if (!skip_prologue_function (gdbarch, nextpc,
1115 bit (inst2, 12) != 0))
1116 break;
1117 }
1118
1119 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1120 { registers } */
1121 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1122 {
1123 pv_t addr = regs[bits (insn, 0, 3)];
1124 int regno;
1125
1126 if (stack.store_would_trash (addr))
1127 break;
1128
1129 /* Calculate offsets of saved registers. */
1130 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1131 if (inst2 & (1 << regno))
1132 {
1133 addr = pv_add_constant (addr, -4);
1134 stack.store (addr, 4, regs[regno]);
1135 }
1136
1137 if (insn & 0x0020)
1138 regs[bits (insn, 0, 3)] = addr;
1139 }
1140
1141 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1142 else if ((insn & 0xff20) == 0xed20
1143 && (inst2 & 0x0f00) == 0x0b00
1144 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1145 {
1146 /* Address SP points to. */
1147 pv_t addr = regs[bits (insn, 0, 3)];
1148
1149 /* Number of registers saved. */
1150 unsigned int number = bits (inst2, 0, 7) >> 1;
1151
1152 /* First register to save. */
1153 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1154
1155 if (stack.store_would_trash (addr))
1156 break;
1157
1158 /* Calculate offsets of saved registers. */
1159 for (; number > 0; number--)
1160 {
1161 addr = pv_add_constant (addr, -8);
1162 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1163 + vd + number, 0));
1164 }
1165
1166 /* Writeback SP to account for the saved registers. */
1167 regs[bits (insn, 0, 3)] = addr;
1168 }
1169
1170 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1171 [Rn, #+/-imm]{!} */
1172 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1173 {
1174 int regno1 = bits (inst2, 12, 15);
1175 int regno2 = bits (inst2, 8, 11);
1176 pv_t addr = regs[bits (insn, 0, 3)];
1177
1178 offset = inst2 & 0xff;
1179 if (insn & 0x0080)
1180 addr = pv_add_constant (addr, offset);
1181 else
1182 addr = pv_add_constant (addr, -offset);
1183
1184 if (stack.store_would_trash (addr))
1185 break;
1186
1187 stack.store (addr, 4, regs[regno1]);
1188 stack.store (pv_add_constant (addr, 4),
1189 4, regs[regno2]);
1190
1191 if (insn & 0x0020)
1192 regs[bits (insn, 0, 3)] = addr;
1193 }
1194
1195 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1196 && (inst2 & 0x0c00) == 0x0c00
1197 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1198 {
1199 int regno = bits (inst2, 12, 15);
1200 pv_t addr = regs[bits (insn, 0, 3)];
1201
1202 offset = inst2 & 0xff;
1203 if (inst2 & 0x0200)
1204 addr = pv_add_constant (addr, offset);
1205 else
1206 addr = pv_add_constant (addr, -offset);
1207
1208 if (stack.store_would_trash (addr))
1209 break;
1210
1211 stack.store (addr, 4, regs[regno]);
1212
1213 if (inst2 & 0x0100)
1214 regs[bits (insn, 0, 3)] = addr;
1215 }
1216
1217 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1218 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1219 {
1220 int regno = bits (inst2, 12, 15);
1221 pv_t addr;
1222
1223 offset = inst2 & 0xfff;
1224 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1225
1226 if (stack.store_would_trash (addr))
1227 break;
1228
1229 stack.store (addr, 4, regs[regno]);
1230 }
1231
1232 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1233 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1234 /* Ignore stores of argument registers to the stack. */
1235 ;
1236
1237 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1238 && (inst2 & 0x0d00) == 0x0c00
1239 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1240 /* Ignore stores of argument registers to the stack. */
1241 ;
1242
1243 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1244 { registers } */
1245 && (inst2 & 0x8000) == 0x0000
1246 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1247 /* Ignore block loads from the stack, potentially copying
1248 parameters from memory. */
1249 ;
1250
1251 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1252 [Rn, #+/-imm] */
1253 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1254 /* Similarly ignore dual loads from the stack. */
1255 ;
1256
1257 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1258 && (inst2 & 0x0d00) == 0x0c00
1259 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1260 /* Similarly ignore single loads from the stack. */
1261 ;
1262
1263 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1264 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1265 /* Similarly ignore single loads from the stack. */
1266 ;
1267
1268 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1269 && (inst2 & 0x8000) == 0x0000)
1270 {
1271 unsigned int imm = ((bits (insn, 10, 10) << 11)
1272 | (bits (inst2, 12, 14) << 8)
1273 | bits (inst2, 0, 7));
1274
1275 regs[bits (inst2, 8, 11)]
1276 = pv_add_constant (regs[bits (insn, 0, 3)],
1277 thumb_expand_immediate (imm));
1278 }
1279
1280 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1281 && (inst2 & 0x8000) == 0x0000)
1282 {
1283 unsigned int imm = ((bits (insn, 10, 10) << 11)
1284 | (bits (inst2, 12, 14) << 8)
1285 | bits (inst2, 0, 7));
1286
1287 regs[bits (inst2, 8, 11)]
1288 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1289 }
1290
1291 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1292 && (inst2 & 0x8000) == 0x0000)
1293 {
1294 unsigned int imm = ((bits (insn, 10, 10) << 11)
1295 | (bits (inst2, 12, 14) << 8)
1296 | bits (inst2, 0, 7));
1297
1298 regs[bits (inst2, 8, 11)]
1299 = pv_add_constant (regs[bits (insn, 0, 3)],
1300 - (CORE_ADDR) thumb_expand_immediate (imm));
1301 }
1302
1303 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1304 && (inst2 & 0x8000) == 0x0000)
1305 {
1306 unsigned int imm = ((bits (insn, 10, 10) << 11)
1307 | (bits (inst2, 12, 14) << 8)
1308 | bits (inst2, 0, 7));
1309
1310 regs[bits (inst2, 8, 11)]
1311 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1312 }
1313
1314 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1315 {
1316 unsigned int imm = ((bits (insn, 10, 10) << 11)
1317 | (bits (inst2, 12, 14) << 8)
1318 | bits (inst2, 0, 7));
1319
1320 regs[bits (inst2, 8, 11)]
1321 = pv_constant (thumb_expand_immediate (imm));
1322 }
1323
1324 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1325 {
1326 unsigned int imm
1327 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1328
1329 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1330 }
1331
1332 else if (insn == 0xea5f /* mov.w Rd,Rm */
1333 && (inst2 & 0xf0f0) == 0)
1334 {
1335 int dst_reg = (inst2 & 0x0f00) >> 8;
1336 int src_reg = inst2 & 0xf;
1337 regs[dst_reg] = regs[src_reg];
1338 }
1339
1340 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1341 {
1342 /* Constant pool loads. */
1343 unsigned int constant;
1344 CORE_ADDR loc;
1345
1346 offset = bits (inst2, 0, 11);
1347 if (insn & 0x0080)
1348 loc = start + 4 + offset;
1349 else
1350 loc = start + 4 - offset;
1351
1352 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1353 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1354 }
1355
1356 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1357 {
1358 /* Constant pool loads. */
1359 unsigned int constant;
1360 CORE_ADDR loc;
1361
1362 offset = bits (inst2, 0, 7) << 2;
1363 if (insn & 0x0080)
1364 loc = start + 4 + offset;
1365 else
1366 loc = start + 4 - offset;
1367
1368 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1369 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1370
1371 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1372 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1373 }
1374 /* Start of ARMv8.1-m PACBTI extension instructions. */
1375 else if (IS_PAC (whole_insn))
1376 {
1377 /* LR and SP are input registers. PAC is in R12. LR is
1378 signed from this point onwards. NOP space. */
1379 ra_signed_state = true;
1380 }
1381 else if (IS_PACBTI (whole_insn))
1382 {
1383 /* LR and SP are input registers. PAC is in R12 and PC is a
1384 valid BTI landing pad. LR is signed from this point onwards.
1385 NOP space. */
1386 ra_signed_state = true;
1387 }
1388 else if (IS_BTI (whole_insn))
1389 {
1390 /* Valid BTI landing pad. NOP space. */
1391 }
1392 else if (IS_PACG (whole_insn))
1393 {
1394 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1395 this point onwards. */
1396 ra_signed_state = true;
1397 }
1398 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1399 {
1400 /* These instructions appear close to the epilogue, when signed
1401 pointers are getting authenticated. */
1402 ra_signed_state = false;
1403 }
1404 /* End of ARMv8.1-m PACBTI extension instructions. */
1405 else if (thumb2_instruction_changes_pc (insn, inst2))
1406 {
1407 /* Don't scan past anything that might change control flow. */
1408 break;
1409 }
1410 else
1411 {
1412 /* The optimizer might shove anything into the prologue,
1413 so we just skip what we don't recognize. */
1414 unrecognized_pc = start;
1415 }
1416
1417 /* Make sure we are dealing with a target that supports ARMv8.1-m
1418 PACBTI. */
1419 if (cache != nullptr && tdep->have_pacbti
1420 && ra_signed_state.has_value ())
1421 {
1422 arm_debug_printf ("Found pacbti instruction at %s",
1423 paddress (gdbarch, start));
1424 arm_debug_printf ("RA is %s",
1425 *ra_signed_state ? "signed" : "not signed");
1426 cache->ra_signed_state = ra_signed_state;
1427 }
1428
1429 start += 2;
1430 }
1431 else if (thumb_instruction_changes_pc (insn))
1432 {
1433 /* Don't scan past anything that might change control flow. */
1434 break;
1435 }
1436 else
1437 {
1438 /* The optimizer might shove anything into the prologue,
1439 so we just skip what we don't recognize. */
1440 unrecognized_pc = start;
1441 }
1442
1443 start += 2;
1444 }
1445
1446 arm_debug_printf ("Prologue scan stopped at %s",
1447 paddress (gdbarch, start));
1448
1449 if (unrecognized_pc == 0)
1450 unrecognized_pc = start;
1451
1452 if (cache == NULL)
1453 return unrecognized_pc;
1454
1455 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1456 {
1457 /* Frame pointer is fp. Frame size is constant. */
1458 cache->framereg = ARM_FP_REGNUM;
1459 cache->framesize = -regs[ARM_FP_REGNUM].k;
1460 }
1461 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1462 {
1463 /* Frame pointer is r7. Frame size is constant. */
1464 cache->framereg = THUMB_FP_REGNUM;
1465 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1466 }
1467 else
1468 {
1469 /* Try the stack pointer... this is a bit desperate. */
1470 cache->framereg = ARM_SP_REGNUM;
1471 cache->framesize = -regs[ARM_SP_REGNUM].k;
1472 }
1473
1474 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1475 if (stack.find_reg (gdbarch, i, &offset))
1476 {
1477 cache->saved_regs[i].set_addr (offset);
1478 if (i == ARM_SP_REGNUM)
1479 arm_cache_set_active_sp_value (cache, tdep, offset);
1480 }
1481
1482 return unrecognized_pc;
1483 }
1484
1485
1486 /* Try to analyze the instructions starting from PC, which load the symbol
1487 __stack_chk_guard. Return the address of the instruction after this
1488 load, set the destination register number in *DESTREG, and set the size
1489 of the loading instructions in *OFFSET. Return 0 if the instructions
1490 are not recognized. */
1491
1492 static CORE_ADDR
1493 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1494 unsigned int *destreg, int *offset)
1495 {
1496 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1497 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1498 unsigned int low, high, address;
1499
1500 address = 0;
1501 if (is_thumb)
1502 {
1503 unsigned short insn1
1504 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1505
1506 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1507 {
1508 *destreg = bits (insn1, 8, 10);
1509 *offset = 2;
1510 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1511 address = read_memory_unsigned_integer (address, 4,
1512 byte_order_for_code);
1513 }
1514 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1515 {
1516 unsigned short insn2
1517 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1518
1519 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1520
1521 insn1
1522 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1523 insn2
1524 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1525
1526 /* movt Rd, #const */
1527 if ((insn1 & 0xfbc0) == 0xf2c0)
1528 {
1529 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1530 *destreg = bits (insn2, 8, 11);
1531 *offset = 8;
1532 address = (high << 16 | low);
1533 }
1534 }
1535 }
1536 else
1537 {
1538 unsigned int insn
1539 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1540
1541 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1542 {
1543 address = bits (insn, 0, 11) + pc + 8;
1544 address = read_memory_unsigned_integer (address, 4,
1545 byte_order_for_code);
1546
1547 *destreg = bits (insn, 12, 15);
1548 *offset = 4;
1549 }
1550 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1551 {
1552 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1553
1554 insn
1555 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1556
1557 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1558 {
1559 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1560 *destreg = bits (insn, 12, 15);
1561 *offset = 8;
1562 address = (high << 16 | low);
1563 }
1564 }
1565 }
1566
1567 return address;
1568 }
1569
1570 /* Try to skip a sequence of instructions used by the stack protector. If PC
1571 points to the first instruction of this sequence, return the address of
1572 the first instruction after the sequence; otherwise, return the original PC.
1573
1574 On ARM, this sequence of instructions is mainly composed of three steps:
1575 Step 1: load the address of the symbol __stack_chk_guard,
1576 Step 2: load from the address of __stack_chk_guard,
1577 Step 3: store it somewhere else.
1578
1579 Usually, the instructions in steps 2 and 3 are the same across ARM
1580 architectures. Step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1581 step 3 is the single instruction 'str Rx, [r7, #immd]'. However, the
1582 instructions in step 1 vary between ARM architectures. On ARMv7, they
1583 are:
1584
1585 movw Rn, #:lower16:__stack_chk_guard
1586 movt Rn, #:upper16:__stack_chk_guard
1587
1588 On ARMv5t, it is:
1589
1590 ldr Rn, .Label
1591 ....
1592 .Label:
1593 .word __stack_chk_guard
1594
1595 Since ldr/str are very common instructions, we can't use them alone as the
1596 'fingerprint' or 'signature' of a stack protector sequence. Here we use the
1597 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1598 stripped, as the 'fingerprint' of a stack protector code sequence. */
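
/* Illustrative ARMv7 example of the complete sequence being skipped (the
   register numbers and the store offset are arbitrary):

     movw r3, #:lower16:__stack_chk_guard   <- step 1 (8 bytes total)
     movt r3, #:upper16:__stack_chk_guard
     ldr  r3, [r3, #0]                      <- step 2
     str  r3, [r7, #8]                      <- step 3

   arm_skip_stack_protector returns the address just past the final str.  */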
1599
1600 static CORE_ADDR
1601 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1602 {
1603 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1604 unsigned int basereg;
1605 struct bound_minimal_symbol stack_chk_guard;
1606 int offset;
1607 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1608 CORE_ADDR addr;
1609
1610 /* Try to parse the instructions in Step 1. */
1611 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1612 &basereg, &offset);
1613 if (!addr)
1614 return pc;
1615
1616 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1617 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1618 Otherwise, this cannot be a stack protector sequence. */
1619 if (stack_chk_guard.minsym == NULL
1620 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1621 return pc;
1622
1623 if (is_thumb)
1624 {
1625 unsigned int destreg;
1626 unsigned short insn
1627 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1628
1629 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1630 if ((insn & 0xf800) != 0x6800)
1631 return pc;
1632 if (bits (insn, 3, 5) != basereg)
1633 return pc;
1634 destreg = bits (insn, 0, 2);
1635
1636 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1637 byte_order_for_code);
1638 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1639 if ((insn & 0xf800) != 0x6000)
1640 return pc;
1641 if (destreg != bits (insn, 0, 2))
1642 return pc;
1643 }
1644 else
1645 {
1646 unsigned int destreg;
1647 unsigned int insn
1648 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1649
1650 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1651 if ((insn & 0x0e500000) != 0x04100000)
1652 return pc;
1653 if (bits (insn, 16, 19) != basereg)
1654 return pc;
1655 destreg = bits (insn, 12, 15);
1656 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1657 insn = read_code_unsigned_integer (pc + offset + 4,
1658 4, byte_order_for_code);
1659 if ((insn & 0x0e500000) != 0x04000000)
1660 return pc;
1661 if (bits (insn, 12, 15) != destreg)
1662 return pc;
1663 }
1664 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1665 and 8 bytes on ARM. */
1666 if (is_thumb)
1667 return pc + offset + 4;
1668 else
1669 return pc + offset + 8;
1670 }
1671
1672 /* Advance the PC across any function entry prologue instructions to
1673 reach some "real" code.
1674
1675 The APCS (ARM Procedure Call Standard) defines the following
1676 prologue:
1677
1678 mov ip, sp
1679 [stmfd sp!, {a1,a2,a3,a4}]
1680 stmfd sp!, {...,fp,ip,lr,pc}
1681 [stfe f7, [sp, #-12]!]
1682 [stfe f6, [sp, #-12]!]
1683 [stfe f5, [sp, #-12]!]
1684 [stfe f4, [sp, #-12]!]
1685 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1686
1687 static CORE_ADDR
1688 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1689 {
1690 CORE_ADDR func_addr, limit_pc;
1691
1692 /* See if we can determine the end of the prologue via the symbol table.
1693 If so, then return either PC, or the PC after the prologue, whichever
1694 is greater. */
1695 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1696 {
1697 CORE_ADDR post_prologue_pc
1698 = skip_prologue_using_sal (gdbarch, func_addr);
1699 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1700
1701 if (post_prologue_pc)
1702 post_prologue_pc
1703 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1704
1705
1706 /* GCC always emits a line note before the prologue and another
1707 one after, even if the two are at the same address or on the
1708 same line. Take advantage of this so that we do not need to
1709 know every instruction that might appear in the prologue. We
1710 will have producer information for most binaries; if it is
1711 missing (e.g. for -gstabs), assume the GNU tools. */
1712 if (post_prologue_pc
1713 && (cust == NULL
1714 || cust->producer () == NULL
1715 || startswith (cust->producer (), "GNU ")
1716 || producer_is_llvm (cust->producer ())))
1717 return post_prologue_pc;
1718
1719 if (post_prologue_pc != 0)
1720 {
1721 CORE_ADDR analyzed_limit;
1722
1723 /* For non-GCC compilers, make sure the entire line is an
1724 acceptable prologue; GDB will round this function's
1725 return value up to the end of the following line so we
1726 can not skip just part of a line (and we do not want to).
1727
1728 RealView does not treat the prologue specially, but does
1729 associate prologue code with the opening brace; so this
1730 lets us skip the first line if we think it is the opening
1731 brace. */
1732 if (arm_pc_is_thumb (gdbarch, func_addr))
1733 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1734 post_prologue_pc, NULL);
1735 else
1736 analyzed_limit
1737 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1738 NULL, target_arm_instruction_reader ());
1739
1740 if (analyzed_limit != post_prologue_pc)
1741 return func_addr;
1742
1743 return post_prologue_pc;
1744 }
1745 }
1746
1747 /* Can't determine prologue from the symbol table, need to examine
1748 instructions. */
1749
1750 /* Find an upper limit on the function prologue using the debug
1751 information. If the debug information could not be used to provide
1752 that bound, then use an arbitrarily large number as the upper bound. */
1753 /* Like arm_scan_prologue, stop no later than pc + 64. */
1754 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1755 if (limit_pc == 0)
1756 limit_pc = pc + 64; /* Magic. */
1757
1758
1759 /* Check if this is Thumb code. */
1760 if (arm_pc_is_thumb (gdbarch, pc))
1761 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1762 else
1763 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1764 target_arm_instruction_reader ());
1765 }
1766
1767 /* *INDENT-OFF* */
1768 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1769 This function decodes a Thumb function prologue to determine:
1770 1) the size of the stack frame
1771 2) which registers are saved on it
1772 3) the offsets of saved regs
1773 4) the offset from the stack pointer to the frame pointer
1774
1775 A typical Thumb function prologue would create this stack frame
1776 (offsets relative to FP)
1777 old SP -> 24 stack parameters
1778 20 LR
1779 16 R7
1780 R7 -> 0 local variables (16 bytes)
1781 SP -> -12 additional stack space (12 bytes)
1782 The frame size would thus be 36 bytes, and the frame offset would be
1783 12 bytes. The frame register is R7.
1784
1785 The comments for thumb_analyze_prologue () describe the algorithm we
1786 use to detect the end of the prologue. */
1787 /* *INDENT-ON* */
1788
1789 static void
1790 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1791 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1792 {
1793 CORE_ADDR prologue_start;
1794 CORE_ADDR prologue_end;
1795
1796 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1797 &prologue_end))
1798 {
1799 /* See comment in arm_scan_prologue for an explanation of
1800 this heuristic. */
1801 if (prologue_end > prologue_start + 64)
1802 {
1803 prologue_end = prologue_start + 64;
1804 }
1805 }
1806 else
1807 /* We're in the boondocks: we have no idea where the start of the
1808 function is. */
1809 return;
1810
1811 prologue_end = std::min (prologue_end, prev_pc);
1812
1813 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1814 }
1815
1816 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1817 otherwise. */
1818
1819 static int
1820 arm_instruction_restores_sp (unsigned int insn)
1821 {
1822 if (bits (insn, 28, 31) != INST_NV)
1823 {
1824 if ((insn & 0x0df0f000) == 0x0080d000
1825 /* ADD SP (register or immediate). */
1826 || (insn & 0x0df0f000) == 0x0040d000
1827 /* SUB SP (register or immediate). */
1828 || (insn & 0x0ffffff0) == 0x01a0d000
1829 /* MOV SP. */
1830 || (insn & 0x0fff0000) == 0x08bd0000
1831 /* POP (LDMIA). */
1832 || (insn & 0x0fff0000) == 0x049d0000)
1833 /* POP of a single register. */
1834 return 1;
1835 }
1836
1837 return 0;
1838 }
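
/* Editorial examples (not from the original source): typical epilogue
   instructions accepted above include "add sp, sp, #16" (0xe28dd010),
   which matches the ADD SP mask, "mov sp, fp" (0xe1a0d00b), which
   matches the MOV SP mask, and "pop {r4, r7, pc}" (0xe8bd8090), which
   matches the LDMIA-based POP mask.  */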
1839
1840 /* Implement immediate value decoding, as described in section A5.2.4
1841 (Modified immediate constants in ARM instructions) of the ARM Architecture
1842 Reference Manual (ARMv7-A and ARMv7-R edition). */
1843
1844 static uint32_t
1845 arm_expand_immediate (uint32_t imm)
1846 {
1847 /* Immediate values are 12 bits long. */
1848 gdb_assert ((imm & 0xfffff000) == 0);
1849
1850 uint32_t unrotated_value = imm & 0xff;
1851 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1852
1853 if (rotate_amount == 0)
1854 return unrotated_value;
1855
1856 return ((unrotated_value >> rotate_amount)
1857 | (unrotated_value << (32 - rotate_amount)));
1858 }
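
#if GDB_SELF_TEST
namespace selftests {

/* Editorial sketch, not part of the original file: a few hand-checked
   expansions of arm_expand_immediate.  To actually run, this would
   still need to be registered from _initialize_arm_tdep via
   selftests::register_test.  */

static void
arm_expand_immediate_test ()
{
  /* A zero rotate field returns the 8-bit payload unchanged.  */
  SELF_CHECK (arm_expand_immediate (0x0ff) == 0xff);

  /* Rotate field 0x2 rotates the payload right by 4 bits.  */
  SELF_CHECK (arm_expand_immediate (0x2ff) == 0xf000000f);

  /* Rotate field 0xf rotates right by 30, i.e. left by 2.  */
  SELF_CHECK (arm_expand_immediate (0xf01) == 0x4);
}

} /* namespace selftests */
#endif /* GDB_SELF_TEST */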
1859
1860 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1861 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1862 fill it in. Return the first address not recognized as a prologue
1863 instruction.
1864
1865 We recognize all the instructions typically found in ARM prologues,
1866 plus harmless instructions which can be skipped (either for analysis
1867 purposes, or a more restrictive set that can be skipped when finding
1868 the end of the prologue). */
1869
1870 static CORE_ADDR
1871 arm_analyze_prologue (struct gdbarch *gdbarch,
1872 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1873 struct arm_prologue_cache *cache,
1874 const arm_instruction_reader &insn_reader)
1875 {
1876 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1877 int regno;
1878 CORE_ADDR offset, current_pc;
1879 pv_t regs[ARM_FPS_REGNUM];
1880 CORE_ADDR unrecognized_pc = 0;
1881 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1882
1883 /* Search the prologue looking for instructions that set up the
1884 frame pointer, adjust the stack pointer, and save registers.
1885
1886 Be careful, however, and if it doesn't look like a prologue,
1887 don't try to scan it. If, for instance, a frameless function
1888 begins with stmfd sp!, then we will tell ourselves there is
1889 a frame, which will confuse stack traceback, as well as "finish"
1890 and other operations that rely on a knowledge of the stack
1891 traceback. */
1892
1893 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1894 regs[regno] = pv_register (regno, 0);
1895 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1896
1897 for (current_pc = prologue_start;
1898 current_pc < prologue_end;
1899 current_pc += 4)
1900 {
1901 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1902
1903 if (insn == 0xe1a0c00d) /* mov ip, sp */
1904 {
1905 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1906 continue;
1907 }
1908 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1909 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1910 {
1911 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1912 int rd = bits (insn, 12, 15);
1913 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1914 continue;
1915 }
1916 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1917 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1918 {
1919 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1920 int rd = bits (insn, 12, 15);
1921 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1922 continue;
1923 }
1924 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1925 [sp, #-4]! */
1926 {
1927 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1928 break;
1929 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1930 stack.store (regs[ARM_SP_REGNUM], 4,
1931 regs[bits (insn, 12, 15)]);
1932 continue;
1933 }
1934 else if ((insn & 0xffff0000) == 0xe92d0000)
1935 /* stmfd sp!, {..., fp, ip, lr, pc}
1936 or
1937 stmfd sp!, {a1, a2, a3, a4} */
1938 {
1939 int mask = insn & 0xffff;
1940
1941 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1942 break;
1943
1944 /* Calculate offsets of saved registers. */
1945 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1946 if (mask & (1 << regno))
1947 {
1948 regs[ARM_SP_REGNUM]
1949 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1950 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1951 }
1952 }
1953 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1954 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1955 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1956 {
1957 /* No need to add this to saved_regs -- it's just an arg reg. */
1958 continue;
1959 }
1960 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1961 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1962 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1963 {
1964 /* No need to add this to saved_regs -- it's just an arg reg. */
1965 continue;
1966 }
1967 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1968 { registers } */
1969 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1970 {
1971 /* No need to add this to saved_regs -- it's just arg regs. */
1972 continue;
1973 }
1974 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1975 {
1976 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1977 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1978 }
1979 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1980 {
1981 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1982 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1983 }
1984 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1985 [sp, -#c]! */
1986 && tdep->have_fpa_registers)
1987 {
1988 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1989 break;
1990
1991 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1992 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1993 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1994 }
1995 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1996 [sp!] */
1997 && tdep->have_fpa_registers)
1998 {
1999 int n_saved_fp_regs;
2000 unsigned int fp_start_reg, fp_bound_reg;
2001
2002 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2003 break;
2004
2005 if ((insn & 0x800) == 0x800) /* N0 is set */
2006 {
2007 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2008 n_saved_fp_regs = 3;
2009 else
2010 n_saved_fp_regs = 1;
2011 }
2012 else
2013 {
2014 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2015 n_saved_fp_regs = 2;
2016 else
2017 n_saved_fp_regs = 4;
2018 }
2019
2020 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2021 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2022 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2023 {
2024 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2025 stack.store (regs[ARM_SP_REGNUM], 12,
2026 regs[fp_start_reg]);
2027 }
2028 }
2029 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2030 {
2031 /* Allow some special function calls when skipping the
2032 prologue; GCC generates these before storing arguments to
2033 the stack. */
2034 CORE_ADDR dest = BranchDest (current_pc, insn);
2035
2036 if (skip_prologue_function (gdbarch, dest, 0))
2037 continue;
2038 else
2039 break;
2040 }
2041 else if ((insn & 0xf0000000) != 0xe0000000)
2042 break; /* Condition not true, exit early. */
2043 else if (arm_instruction_changes_pc (insn))
2044 /* Don't scan past anything that might change control flow. */
2045 break;
2046 else if (arm_instruction_restores_sp (insn))
2047 {
2048 /* Don't scan past the epilogue. */
2049 break;
2050 }
2051 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2052 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2053 /* Ignore block loads from the stack, potentially copying
2054 parameters from memory. */
2055 continue;
2056 else if ((insn & 0xfc500000) == 0xe4100000
2057 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2058 /* Similarly ignore single loads from the stack. */
2059 continue;
2060 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2061 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2062 register instead of the stack. */
2063 continue;
2064 else
2065 {
2066 /* The optimizer might shove anything into the prologue.  If
2067 we are building up the cache (cache != NULL) from scanning
2068 the prologue, just skip what we don't recognize and scan
2069 further, to make the cache as complete as possible.  However,
2070 if we are merely skipping the prologue, stop immediately at
2071 the first unrecognized instruction. */
2072 unrecognized_pc = current_pc;
2073 if (cache != NULL)
2074 continue;
2075 else
2076 break;
2077 }
2078 }
2079
2080 if (unrecognized_pc == 0)
2081 unrecognized_pc = current_pc;
2082
2083 if (cache)
2084 {
2085 int framereg, framesize;
2086
2087 /* The frame size is just the distance from the frame register
2088 to the original stack pointer. */
2089 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2090 {
2091 /* Frame pointer is fp. */
2092 framereg = ARM_FP_REGNUM;
2093 framesize = -regs[ARM_FP_REGNUM].k;
2094 }
2095 else
2096 {
2097 /* Try the stack pointer... this is a bit desperate. */
2098 framereg = ARM_SP_REGNUM;
2099 framesize = -regs[ARM_SP_REGNUM].k;
2100 }
2101
2102 cache->framereg = framereg;
2103 cache->framesize = framesize;
2104
2105 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2106 if (stack.find_reg (gdbarch, regno, &offset))
2107 {
2108 cache->saved_regs[regno].set_addr (offset);
2109 if (regno == ARM_SP_REGNUM)
2110 arm_cache_set_active_sp_value (cache, tdep, offset);
2111 }
2112 }
2113
2114 arm_debug_printf ("Prologue scan stopped at %s",
2115 paddress (gdbarch, unrecognized_pc));
2116
2117 return unrecognized_pc;
2118 }
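
/* Editorial example: for the classic APCS frame-setup sequence

     mov   ip, sp
     stmfd sp!, {fp, ip, lr, pc}
     sub   fp, ip, #4
     sub   sp, sp, #16

   the scan above leaves regs[ARM_FP_REGNUM] equal to the entry SP
   minus 4, so the cache records framereg = ARM_FP_REGNUM and
   framesize = 4, while the stack area holds PC, LR, IP and FP at
   offsets -4, -8, -12 and -16 from the entry SP.
   arm_make_prologue_cache later rebases those offsets onto the
   reconstructed previous SP.  */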
2119
2120 static void
2121 arm_scan_prologue (struct frame_info *this_frame,
2122 struct arm_prologue_cache *cache)
2123 {
2124 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2125 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2126 CORE_ADDR prologue_start, prologue_end;
2127 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2128 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2129 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2130
2131 /* Assume there is no frame until proven otherwise. */
2132 cache->framereg = ARM_SP_REGNUM;
2133 cache->framesize = 0;
2134
2135 /* Check for Thumb prologue. */
2136 if (arm_frame_is_thumb (this_frame))
2137 {
2138 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2139 return;
2140 }
2141
2142 /* Find the function prologue. If we can't find the function in
2143 the symbol table, peek in the stack frame to find the PC. */
2144 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2145 &prologue_end))
2146 {
2147 /* One way to find the end of the prologue (which works well
2148 for unoptimized code) is to do the following:
2149
2150 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2151
2152 if (sal.line == 0)
2153 prologue_end = prev_pc;
2154 else if (sal.end < prologue_end)
2155 prologue_end = sal.end;
2156
2157 This mechanism is very accurate so long as the optimizer
2158 doesn't move any instructions from the function body into the
2159 prologue. If this happens, sal.end will be the last
2160 instruction in the first hunk of prologue code just before
2161 the first instruction that the scheduler has moved from
2162 the body to the prologue.
2163
2164 In order to make sure that we scan all of the prologue
2165 instructions, we use a slightly less accurate mechanism which
2166 may scan more than necessary. To help compensate for this
2167 lack of accuracy, the prologue scanning loop below contains
2168 several clauses which will cause the loop to terminate early if
2169 an implausible prologue instruction is encountered.
2170
2171 The expression
2172
2173 prologue_start + 64
2174
2175 is a suitable endpoint since it accounts for the largest
2176 possible prologue plus up to five instructions inserted by
2177 the scheduler. */
2178
2179 if (prologue_end > prologue_start + 64)
2180 {
2181 prologue_end = prologue_start + 64; /* See above. */
2182 }
2183 }
2184 else
2185 {
2186 /* We have no symbol information. Our only option is to assume this
2187 function has a standard stack frame and the normal frame register.
2188 Then, we can find the value of our frame pointer on entrance to
2189 the callee (or at the present moment if this is the innermost frame).
2190 The value stored there should be the address of the stmfd + 8. */
2191 CORE_ADDR frame_loc;
2192 ULONGEST return_value;
2193
2194 /* AAPCS does not use a frame register, so we can abort here. */
2195 if (tdep->arm_abi == ARM_ABI_AAPCS)
2196 return;
2197
2198 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2199 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2200 &return_value))
2201 return;
2202 else
2203 {
2204 prologue_start = gdbarch_addr_bits_remove
2205 (gdbarch, return_value) - 8;
2206 prologue_end = prologue_start + 64; /* See above. */
2207 }
2208 }
2209
2210 if (prev_pc < prologue_end)
2211 prologue_end = prev_pc;
2212
2213 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2214 target_arm_instruction_reader ());
2215 }
2216
2217 static struct arm_prologue_cache *
2218 arm_make_prologue_cache (struct frame_info *this_frame)
2219 {
2220 int reg;
2221 struct arm_prologue_cache *cache;
2222 CORE_ADDR unwound_fp, prev_sp;
2223
2224 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2225 arm_cache_init (cache, this_frame);
2226
2227 arm_scan_prologue (this_frame, cache);
2228
2229 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2230 if (unwound_fp == 0)
2231 return cache;
2232
2233 arm_gdbarch_tdep *tdep =
2234 (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2235
2236 prev_sp = unwound_fp + cache->framesize;
2237 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2238
2239 /* Calculate actual addresses of saved registers using offsets
2240 determined by arm_scan_prologue. */
2241 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2242 if (cache->saved_regs[reg].is_addr ())
2243 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () +
2244 prev_sp);
2245
2246 return cache;
2247 }
2248
2249 /* Implementation of the stop_reason hook for arm_prologue frames. */
2250
2251 static enum unwind_stop_reason
2252 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2253 void **this_cache)
2254 {
2255 struct arm_prologue_cache *cache;
2256 CORE_ADDR pc;
2257
2258 if (*this_cache == NULL)
2259 *this_cache = arm_make_prologue_cache (this_frame);
2260 cache = (struct arm_prologue_cache *) *this_cache;
2261
2262 /* This is meant to halt the backtrace at "_start". */
2263 pc = get_frame_pc (this_frame);
2264 gdbarch *arch = get_frame_arch (this_frame);
2265 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (arch);
2266 if (pc <= tdep->lowest_pc)
2267 return UNWIND_OUTERMOST;
2268
2269 /* If we've hit a wall, stop. */
2270 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2271 return UNWIND_OUTERMOST;
2272
2273 return UNWIND_NO_REASON;
2274 }
2275
2276 /* Our frame ID for a normal frame is the current function's starting PC
2277 and the caller's SP when we were called. */
2278
2279 static void
2280 arm_prologue_this_id (struct frame_info *this_frame,
2281 void **this_cache,
2282 struct frame_id *this_id)
2283 {
2284 struct arm_prologue_cache *cache;
2285 struct frame_id id;
2286 CORE_ADDR pc, func;
2287
2288 if (*this_cache == NULL)
2289 *this_cache = arm_make_prologue_cache (this_frame);
2290 cache = (struct arm_prologue_cache *) *this_cache;
2291
2292 arm_gdbarch_tdep *tdep
2293 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2294
2295 /* Use function start address as part of the frame ID. If we cannot
2296 identify the start address (due to missing symbol information),
2297 fall back to just using the current PC. */
2298 pc = get_frame_pc (this_frame);
2299 func = get_frame_func (this_frame);
2300 if (!func)
2301 func = pc;
2302
2303 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2304 *this_id = id;
2305 }
2306
2307 static struct value *
2308 arm_prologue_prev_register (struct frame_info *this_frame,
2309 void **this_cache,
2310 int prev_regnum)
2311 {
2312 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2313 struct arm_prologue_cache *cache;
2314 CORE_ADDR sp_value;
2315
2316 if (*this_cache == NULL)
2317 *this_cache = arm_make_prologue_cache (this_frame);
2318 cache = (struct arm_prologue_cache *) *this_cache;
2319
2320 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2321
2322 /* If this frame has signed the return address, mark it as so. */
2323 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2324 && *cache->ra_signed_state)
2325 set_frame_previous_pc_masked (this_frame);
2326
2327 /* If we are asked to unwind the PC, then we need to return the LR
2328 instead. The prologue may save PC, but it will point into this
2329 frame's prologue, not the next frame's resume location. Also
2330 strip the saved T bit. A valid LR may have the low bit set, but
2331 a valid PC never does. */
2332 if (prev_regnum == ARM_PC_REGNUM)
2333 {
2334 CORE_ADDR lr;
2335
2336 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2337 return frame_unwind_got_constant (this_frame, prev_regnum,
2338 arm_addr_bits_remove (gdbarch, lr));
2339 }
2340
2341 /* SP is generally not saved to the stack, but this frame is
2342 identified by the next frame's stack pointer at the time of the call.
2343 The value was already reconstructed into PREV_SP. */
2344 if (prev_regnum == ARM_SP_REGNUM)
2345 return frame_unwind_got_constant (this_frame, prev_regnum,
2346 arm_cache_get_prev_sp_value (cache, tdep));
2347
2348 /* The value might be one of the alternative SP, if so, use the
2349 value already constructed. */
2350 if (arm_cache_is_sp_register (cache, tdep, prev_regnum))
2351 {
2352 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2353 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2354 }
2355
2356 /* The CPSR may have been changed by the call instruction and by the
2357 called function. The only bit we can reconstruct is the T bit,
2358 by checking the low bit of LR as of the call. This is a reliable
2359 indicator of Thumb-ness except for some ARM v4T pre-interworking
2360 Thumb code, which could get away with a clear low bit as long as
2361 the called function did not use bx. Guess that all other
2362 bits are unchanged; the condition flags are presumably lost,
2363 but the processor status is likely valid. */
2364 if (prev_regnum == ARM_PS_REGNUM)
2365 {
2366 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2367 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2368
2369 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2370 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2371 }
2372
2373 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2374 prev_regnum);
2375 }
2376
2377 static frame_unwind arm_prologue_unwind = {
2378 "arm prologue",
2379 NORMAL_FRAME,
2380 arm_prologue_unwind_stop_reason,
2381 arm_prologue_this_id,
2382 arm_prologue_prev_register,
2383 NULL,
2384 default_frame_sniffer
2385 };
2386
2387 /* Maintain a list of ARM exception table entries per objfile, similar to the
2388 list of mapping symbols. We only cache entries for standard ARM-defined
2389 personality routines; the cache will contain only the frame unwinding
2390 instructions associated with the entry (not the descriptors). */
2391
2392 struct arm_exidx_entry
2393 {
2394 CORE_ADDR addr;
2395 gdb_byte *entry;
2396
2397 bool operator< (const arm_exidx_entry &other) const
2398 {
2399 return addr < other.addr;
2400 }
2401 };
2402
2403 struct arm_exidx_data
2404 {
2405 std::vector<std::vector<arm_exidx_entry>> section_maps;
2406 };
2407
2408 /* Per-BFD key to store exception handling information. */
2409 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2410
2411 static struct obj_section *
2412 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2413 {
2414 struct obj_section *osect;
2415
2416 ALL_OBJFILE_OSECTIONS (objfile, osect)
2417 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2418 {
2419 bfd_vma start, size;
2420 start = bfd_section_vma (osect->the_bfd_section);
2421 size = bfd_section_size (osect->the_bfd_section);
2422
2423 if (start <= vma && vma < start + size)
2424 return osect;
2425 }
2426
2427 return NULL;
2428 }
2429
2430 /* Parse contents of exception table and exception index sections
2431 of OBJFILE, and fill in the exception table entry cache.
2432
2433 For each entry that refers to a standard ARM-defined personality
2434 routine, extract the frame unwinding instructions (from either
2435 the index or the table section). The unwinding instructions
2436 are normalized by:
2437 - extracting them from the rest of the table data
2438 - converting to host endianness
2439 - appending the implicit 0xb0 ("Finish") code
2440
2441 The extracted and normalized instructions are stored for later
2442 retrieval by the arm_find_exidx_entry routine. */
2443
2444 static void
2445 arm_exidx_new_objfile (struct objfile *objfile)
2446 {
2447 struct arm_exidx_data *data;
2448 asection *exidx, *extab;
2449 bfd_vma exidx_vma = 0, extab_vma = 0;
2450 LONGEST i;
2451
2452 /* If we've already touched this file, do nothing. */
2453 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2454 return;
2455
2456 /* Read contents of exception table and index. */
2457 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2458 gdb::byte_vector exidx_data;
2459 if (exidx)
2460 {
2461 exidx_vma = bfd_section_vma (exidx);
2462 exidx_data.resize (bfd_section_size (exidx));
2463
2464 if (!bfd_get_section_contents (objfile->obfd, exidx,
2465 exidx_data.data (), 0,
2466 exidx_data.size ()))
2467 return;
2468 }
2469
2470 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2471 gdb::byte_vector extab_data;
2472 if (extab)
2473 {
2474 extab_vma = bfd_section_vma (extab);
2475 extab_data.resize (bfd_section_size (extab));
2476
2477 if (!bfd_get_section_contents (objfile->obfd, extab,
2478 extab_data.data (), 0,
2479 extab_data.size ()))
2480 return;
2481 }
2482
2483 /* Allocate exception table data structure. */
2484 data = arm_exidx_data_key.emplace (objfile->obfd);
2485 data->section_maps.resize (objfile->obfd->section_count);
2486
2487 /* Fill in exception table. */
2488 for (i = 0; i < exidx_data.size () / 8; i++)
2489 {
2490 struct arm_exidx_entry new_exidx_entry;
2491 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2492 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2493 exidx_data.data () + i * 8 + 4);
2494 bfd_vma addr = 0, word = 0;
2495 int n_bytes = 0, n_words = 0;
2496 struct obj_section *sec;
2497 gdb_byte *entry = NULL;
2498
2499 /* Extract address of start of function. */
2500 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2501 idx += exidx_vma + i * 8;
2502
2503 /* Find section containing function and compute section offset. */
2504 sec = arm_obj_section_from_vma (objfile, idx);
2505 if (sec == NULL)
2506 continue;
2507 idx -= bfd_section_vma (sec->the_bfd_section);
2508
2509 /* Determine address of exception table entry. */
2510 if (val == 1)
2511 {
2512 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2513 }
2514 else if ((val & 0xff000000) == 0x80000000)
2515 {
2516 /* Exception table entry embedded in .ARM.exidx
2517 -- must be short form. */
2518 word = val;
2519 n_bytes = 3;
2520 }
2521 else if (!(val & 0x80000000))
2522 {
2523 /* Exception table entry in .ARM.extab. */
2524 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2525 addr += exidx_vma + i * 8 + 4;
2526
2527 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2528 {
2529 word = bfd_h_get_32 (objfile->obfd,
2530 extab_data.data () + addr - extab_vma);
2531 addr += 4;
2532
2533 if ((word & 0xff000000) == 0x80000000)
2534 {
2535 /* Short form. */
2536 n_bytes = 3;
2537 }
2538 else if ((word & 0xff000000) == 0x81000000
2539 || (word & 0xff000000) == 0x82000000)
2540 {
2541 /* Long form. */
2542 n_bytes = 2;
2543 n_words = ((word >> 16) & 0xff);
2544 }
2545 else if (!(word & 0x80000000))
2546 {
2547 bfd_vma pers;
2548 struct obj_section *pers_sec;
2549 int gnu_personality = 0;
2550
2551 /* Custom personality routine. */
2552 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2553 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2554
2555 /* Check whether we've got one of the variants of the
2556 GNU personality routines. */
2557 pers_sec = arm_obj_section_from_vma (objfile, pers);
2558 if (pers_sec)
2559 {
2560 static const char *personality[] =
2561 {
2562 "__gcc_personality_v0",
2563 "__gxx_personality_v0",
2564 "__gcj_personality_v0",
2565 "__gnu_objc_personality_v0",
2566 NULL
2567 };
2568
2569 CORE_ADDR pc = pers + pers_sec->offset ();
2570 int k;
2571
2572 for (k = 0; personality[k]; k++)
2573 if (lookup_minimal_symbol_by_pc_name
2574 (pc, personality[k], objfile))
2575 {
2576 gnu_personality = 1;
2577 break;
2578 }
2579 }
2580
2581 /* If so, the next word contains a word count in the high
2582 byte, followed by the same unwind instructions as the
2583 pre-defined forms. */
2584 if (gnu_personality
2585 && addr + 4 <= extab_vma + extab_data.size ())
2586 {
2587 word = bfd_h_get_32 (objfile->obfd,
2588 (extab_data.data ()
2589 + addr - extab_vma));
2590 addr += 4;
2591 n_bytes = 3;
2592 n_words = ((word >> 24) & 0xff);
2593 }
2594 }
2595 }
2596 }
2597
2598 /* Sanity check address. */
2599 if (n_words)
2600 if (addr < extab_vma
2601 || addr + 4 * n_words > extab_vma + extab_data.size ())
2602 n_words = n_bytes = 0;
2603
2604 /* The unwind instructions reside in WORD (only the N_BYTES least
2605 significant bytes are valid), followed by N_WORDS words in the
2606 extab section starting at ADDR. */
2607 if (n_bytes || n_words)
2608 {
2609 gdb_byte *p = entry
2610 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2611 n_bytes + n_words * 4 + 1);
2612
2613 while (n_bytes--)
2614 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2615
2616 while (n_words--)
2617 {
2618 word = bfd_h_get_32 (objfile->obfd,
2619 extab_data.data () + addr - extab_vma);
2620 addr += 4;
2621
2622 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2623 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2624 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2625 *p++ = (gdb_byte) (word & 0xff);
2626 }
2627
2628 /* Implied "Finish" to terminate the list. */
2629 *p++ = 0xb0;
2630 }
2631
2632 /* Push entry onto vector. They are guaranteed to always
2633 appear in order of increasing addresses. */
2634 new_exidx_entry.addr = idx;
2635 new_exidx_entry.entry = entry;
2636 data->section_maps[sec->the_bfd_section->index].push_back
2637 (new_exidx_entry);
2638 }
2639 }
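
/* Editorial example: an index pair whose second word is 0x80a8b0b0 is
   a short-form entry (personality routine 0, __aeabi_unwind_cpp_pr0);
   the three low bytes 0xa8 0xb0 0xb0 are the unwind opcodes, which
   the loop above copies most-significant byte first and terminates
   with the implicit 0xb0 "Finish".  A second word of exactly 1 is
   EXIDX_CANTUNWIND and is recorded with a NULL entry pointer.  */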
2640
2641 /* Search for the exception table entry covering MEMADDR. If one is found,
2642 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2643 set *START to the start of the region covered by this entry. */
2644
2645 static gdb_byte *
2646 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2647 {
2648 struct obj_section *sec;
2649
2650 sec = find_pc_section (memaddr);
2651 if (sec != NULL)
2652 {
2653 struct arm_exidx_data *data;
2654 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2655
2656 data = arm_exidx_data_key.get (sec->objfile->obfd);
2657 if (data != NULL)
2658 {
2659 std::vector<arm_exidx_entry> &map
2660 = data->section_maps[sec->the_bfd_section->index];
2661 if (!map.empty ())
2662 {
2663 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2664
2665 /* std::lower_bound finds the earliest ordered insertion
2666 point. If the following symbol starts at this exact
2667 address, we use that; otherwise, the preceding
2668 exception table entry covers this address. */
2669 if (idx < map.end ())
2670 {
2671 if (idx->addr == map_key.addr)
2672 {
2673 if (start)
2674 *start = idx->addr + sec->addr ();
2675 return idx->entry;
2676 }
2677 }
2678
2679 if (idx > map.begin ())
2680 {
2681 idx = idx - 1;
2682 if (start)
2683 *start = idx->addr + sec->addr ();
2684 return idx->entry;
2685 }
2686 }
2687 }
2688 }
2689
2690 return NULL;
2691 }
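
/* Editorial example: with entries recorded at section offsets 0x0,
   0x40 and 0x100, a lookup for offset 0x40 takes the exact-match case
   above, while a lookup for offset 0x44 finds its lower bound at
   0x100 and steps back one entry, attributing the address to the
   region that starts at 0x40.  */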
2692
2693 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2694 instruction list from the ARM exception table entry ENTRY, allocate and
2695 return a prologue cache structure describing how to unwind this frame.
2696
2697 Return NULL if the unwinding instruction list contains a "spare",
2698 "reserved" or "refuse to unwind" instruction as defined in section
2699 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2700 for the ARM Architecture" document. */
2701
2702 static struct arm_prologue_cache *
2703 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2704 {
2705 CORE_ADDR vsp = 0;
2706 int vsp_valid = 0;
2707
2708 struct arm_prologue_cache *cache;
2709 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2710 arm_cache_init (cache, this_frame);
2711
2712 for (;;)
2713 {
2714 gdb_byte insn;
2715
2716 /* Whenever we reload SP, we have to retrieve its actual value
2717 in the current frame. */
2718 if (!vsp_valid)
2719 {
2720 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2721 {
2722 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2723 vsp = get_frame_register_unsigned (this_frame, reg);
2724 }
2725 else
2726 {
2727 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2728 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2729 }
2730
2731 vsp_valid = 1;
2732 }
2733
2734 /* Decode next unwind instruction. */
2735 insn = *entry++;
2736
2737 if ((insn & 0xc0) == 0)
2738 {
2739 int offset = insn & 0x3f;
2740 vsp += (offset << 2) + 4;
2741 }
2742 else if ((insn & 0xc0) == 0x40)
2743 {
2744 int offset = insn & 0x3f;
2745 vsp -= (offset << 2) + 4;
2746 }
2747 else if ((insn & 0xf0) == 0x80)
2748 {
2749 int mask = ((insn & 0xf) << 8) | *entry++;
2750 int i;
2751
2752 /* The special case of an all-zero mask identifies
2753 "Refuse to unwind". We return NULL to fall back
2754 to the prologue analyzer. */
2755 if (mask == 0)
2756 return NULL;
2757
2758 /* Pop registers r4..r15 under mask. */
2759 for (i = 0; i < 12; i++)
2760 if (mask & (1 << i))
2761 {
2762 cache->saved_regs[4 + i].set_addr (vsp);
2763 vsp += 4;
2764 }
2765
2766 /* Special-case popping SP -- we need to reload vsp. */
2767 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2768 vsp_valid = 0;
2769 }
2770 else if ((insn & 0xf0) == 0x90)
2771 {
2772 int reg = insn & 0xf;
2773
2774 /* Reserved cases. */
2775 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2776 return NULL;
2777
2778 /* Set SP from another register and mark VSP for reload. */
2779 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2780 vsp_valid = 0;
2781 }
2782 else if ((insn & 0xf0) == 0xa0)
2783 {
2784 int count = insn & 0x7;
2785 int pop_lr = (insn & 0x8) != 0;
2786 int i;
2787
2788 /* Pop r4..r[4+count]. */
2789 for (i = 0; i <= count; i++)
2790 {
2791 cache->saved_regs[4 + i].set_addr (vsp);
2792 vsp += 4;
2793 }
2794
2795 /* If indicated by flag, pop LR as well. */
2796 if (pop_lr)
2797 {
2798 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2799 vsp += 4;
2800 }
2801 }
2802 else if (insn == 0xb0)
2803 {
2804 /* We could only have updated PC by popping into it; if so, it
2805 will show up as an address.  Otherwise, copy LR into PC. */
2806 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2807 cache->saved_regs[ARM_PC_REGNUM]
2808 = cache->saved_regs[ARM_LR_REGNUM];
2809
2810 /* We're done. */
2811 break;
2812 }
2813 else if (insn == 0xb1)
2814 {
2815 int mask = *entry++;
2816 int i;
2817
2818 /* All-zero mask and mask >= 16 is "spare". */
2819 if (mask == 0 || mask >= 16)
2820 return NULL;
2821
2822 /* Pop r0..r3 under mask. */
2823 for (i = 0; i < 4; i++)
2824 if (mask & (1 << i))
2825 {
2826 cache->saved_regs[i].set_addr (vsp);
2827 vsp += 4;
2828 }
2829 }
2830 else if (insn == 0xb2)
2831 {
2832 ULONGEST offset = 0;
2833 unsigned shift = 0;
2834
2835 do
2836 {
2837 offset |= (*entry & 0x7f) << shift;
2838 shift += 7;
2839 }
2840 while (*entry++ & 0x80);
2841
2842 vsp += 0x204 + (offset << 2);
2843 }
2844 else if (insn == 0xb3)
2845 {
2846 int start = *entry >> 4;
2847 int count = (*entry++) & 0xf;
2848 int i;
2849
2850 /* Only registers D0..D15 are valid here. */
2851 if (start + count >= 16)
2852 return NULL;
2853
2854 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2855 for (i = 0; i <= count; i++)
2856 {
2857 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2858 vsp += 8;
2859 }
2860
2861 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2862 vsp += 4;
2863 }
2864 else if ((insn & 0xf8) == 0xb8)
2865 {
2866 int count = insn & 0x7;
2867 int i;
2868
2869 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2870 for (i = 0; i <= count; i++)
2871 {
2872 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2873 vsp += 8;
2874 }
2875
2876 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2877 vsp += 4;
2878 }
2879 else if (insn == 0xc6)
2880 {
2881 int start = *entry >> 4;
2882 int count = (*entry++) & 0xf;
2883 int i;
2884
2885 /* Only registers WR0..WR15 are valid. */
2886 if (start + count >= 16)
2887 return NULL;
2888
2889 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2890 for (i = 0; i <= count; i++)
2891 {
2892 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2893 vsp += 8;
2894 }
2895 }
2896 else if (insn == 0xc7)
2897 {
2898 int mask = *entry++;
2899 int i;
2900
2901 /* All-zero mask and mask >= 16 is "spare". */
2902 if (mask == 0 || mask >= 16)
2903 return NULL;
2904
2905 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2906 for (i = 0; i < 4; i++)
2907 if (mask & (1 << i))
2908 {
2909 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2910 vsp += 4;
2911 }
2912 }
2913 else if ((insn & 0xf8) == 0xc0)
2914 {
2915 int count = insn & 0x7;
2916 int i;
2917
2918 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2919 for (i = 0; i <= count; i++)
2920 {
2921 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2922 vsp += 8;
2923 }
2924 }
2925 else if (insn == 0xc8)
2926 {
2927 int start = *entry >> 4;
2928 int count = (*entry++) & 0xf;
2929 int i;
2930
2931 /* Only registers D0..D31 are valid. */
2932 if (start + count >= 16)
2933 return NULL;
2934
2935 /* Pop VFP double-precision registers
2936 D[16+start]..D[16+start+count]. */
2937 for (i = 0; i <= count; i++)
2938 {
2939 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2940 vsp += 8;
2941 }
2942 }
2943 else if (insn == 0xc9)
2944 {
2945 int start = *entry >> 4;
2946 int count = (*entry++) & 0xf;
2947 int i;
2948
2949 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2950 for (i = 0; i <= count; i++)
2951 {
2952 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2953 vsp += 8;
2954 }
2955 }
2956 else if ((insn & 0xf8) == 0xd0)
2957 {
2958 int count = insn & 0x7;
2959 int i;
2960
2961 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2962 for (i = 0; i <= count; i++)
2963 {
2964 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2965 vsp += 8;
2966 }
2967 }
2968 else
2969 {
2970 /* Everything else is "spare". */
2971 return NULL;
2972 }
2973 }
2974
2975 /* If we restore SP from a register, assume this was the frame register.
2976 Otherwise just fall back to SP as frame register. */
2977 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2978 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2979 else
2980 cache->framereg = ARM_SP_REGNUM;
2981
2982 /* Determine offset to previous frame. */
2983 cache->framesize
2984 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2985
2986 /* We already got the previous SP. */
2987 arm_gdbarch_tdep *tdep
2988 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
2989 arm_cache_set_active_sp_value (cache, tdep, vsp);
2990
2991 return cache;
2992 }
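
/* Editorial example: for the opcode sequence 0xa8 0xb0, the 0xa0
   group with its L bit set records r4 at VSP and LR at VSP + 4 and
   advances VSP by 8; the following 0xb0 "Finish" copies the saved LR
   slot into PC and terminates the loop.  The resulting cache unwinds
   the frame as if its prologue had been "push {r4, lr}".  */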
2993
2994 /* Unwinding via ARM exception table entries. Note that the sniffer
2995 already computes a filled-in prologue cache, which is then used
2996 with the same arm_prologue_this_id and arm_prologue_prev_register
2997 routines also used for prologue-parsing based unwinding. */
2998
2999 static int
3000 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3001 struct frame_info *this_frame,
3002 void **this_prologue_cache)
3003 {
3004 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3005 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3006 CORE_ADDR addr_in_block, exidx_region, func_start;
3007 struct arm_prologue_cache *cache;
3008 gdb_byte *entry;
3009
3010 /* See if we have an ARM exception table entry covering this address. */
3011 addr_in_block = get_frame_address_in_block (this_frame);
3012 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3013 if (!entry)
3014 return 0;
3015
3016 /* The ARM exception table does not describe unwind information
3017 for arbitrary PC values, but is guaranteed to be correct only
3018 at call sites. We have to decide here whether we want to use
3019 ARM exception table information for this frame, or fall back
3020 to using prologue parsing. (Note that if we have DWARF CFI,
3021 this sniffer isn't even called -- CFI is always preferred.)
3022
3023 Before we make this decision, however, we check whether we
3024 actually have *symbol* information for the current frame.
3025 If not, prologue parsing would not work anyway, so we might
3026 as well use the exception table and hope for the best. */
3027 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3028 {
3029 int exc_valid = 0;
3030
3031 /* If the next frame is "normal", we are at a call site in this
3032 frame, so exception information is guaranteed to be valid. */
3033 if (get_next_frame (this_frame)
3034 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3035 exc_valid = 1;
3036
3037 /* We also assume exception information is valid if we're currently
3038 blocked in a system call. The system library is supposed to
3039 ensure this, so that e.g. pthread cancellation works. */
3040 if (arm_frame_is_thumb (this_frame))
3041 {
3042 ULONGEST insn;
3043
3044 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
3045 2, byte_order_for_code, &insn)
3046 && (insn & 0xff00) == 0xdf00 /* svc */)
3047 exc_valid = 1;
3048 }
3049 else
3050 {
3051 ULONGEST insn;
3052
3053 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
3054 4, byte_order_for_code, &insn)
3055 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3056 exc_valid = 1;
3057 }
3058
3059 /* Bail out if we don't know that exception information is valid. */
3060 if (!exc_valid)
3061 return 0;
3062
3063 /* The ARM exception index does not mark the *end* of the region
3064 covered by the entry, and some functions will not have any entry.
3065 To correctly recognize the end of the covered region, the linker
3066 should have inserted dummy records with a CANTUNWIND marker.
3067
3068 Unfortunately, current versions of GNU ld do not reliably do
3069 this, and thus we may have found an incorrect entry above.
3070 As a (temporary) sanity check, we only use the entry if it
3071 lies *within* the bounds of the function. Note that this check
3072 might reject perfectly valid entries that just happen to cover
3073 multiple functions; therefore this check ought to be removed
3074 once the linker is fixed. */
3075 if (func_start > exidx_region)
3076 return 0;
3077 }
3078
3079 /* Decode the list of unwinding instructions into a prologue cache.
3080 Note that this may fail due to e.g. a "refuse to unwind" code. */
3081 cache = arm_exidx_fill_cache (this_frame, entry);
3082 if (!cache)
3083 return 0;
3084
3085 *this_prologue_cache = cache;
3086 return 1;
3087 }
3088
3089 struct frame_unwind arm_exidx_unwind = {
3090 "arm exidx",
3091 NORMAL_FRAME,
3092 default_frame_unwind_stop_reason,
3093 arm_prologue_this_id,
3094 arm_prologue_prev_register,
3095 NULL,
3096 arm_exidx_unwind_sniffer
3097 };
3098
3099 static struct arm_prologue_cache *
3100 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
3101 {
3102 struct arm_prologue_cache *cache;
3103 int reg;
3104
3105 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3106 arm_cache_init (cache, this_frame);
3107
3108 /* Still rely on the offset calculated from the prologue. */
3109 arm_scan_prologue (this_frame, cache);
3110
3111 /* Since we are in the epilogue, the SP has already been restored. */
3112 arm_gdbarch_tdep *tdep
3113 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3114 arm_cache_set_active_sp_value (cache, tdep,
3115 get_frame_register_unsigned (this_frame,
3116 ARM_SP_REGNUM));
3117
3118 /* Calculate actual addresses of saved registers using offsets
3119 determined by arm_scan_prologue. */
3120 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3121 if (cache->saved_regs[reg].is_addr ())
3122 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3123 + arm_cache_get_prev_sp_value (cache, tdep));
3124
3125 return cache;
3126 }
3127
3128 /* Implementation of function hook 'this_id' in
3129 'struct frame_unwind' for the epilogue unwinder. */
3130
3131 static void
3132 arm_epilogue_frame_this_id (struct frame_info *this_frame,
3133 void **this_cache,
3134 struct frame_id *this_id)
3135 {
3136 struct arm_prologue_cache *cache;
3137 CORE_ADDR pc, func;
3138
3139 if (*this_cache == NULL)
3140 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3141 cache = (struct arm_prologue_cache *) *this_cache;
3142
3143 /* Use function start address as part of the frame ID. If we cannot
3144 identify the start address (due to missing symbol information),
3145 fall back to just using the current PC. */
3146 pc = get_frame_pc (this_frame);
3147 func = get_frame_func (this_frame);
3148 if (func == 0)
3149 func = pc;
3150
3151 arm_gdbarch_tdep *tdep
3152 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3153 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3154 }
3155
3156 /* Implementation of function hook 'prev_register' in
3157 'struct frame_unwind' for the epilogue unwinder. */
3158
3159 static struct value *
3160 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
3161 void **this_cache, int regnum)
3162 {
3163 if (*this_cache == NULL)
3164 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3165
3166 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3167 }
3168
3169 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3170 CORE_ADDR pc);
3171 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3172 CORE_ADDR pc);
3173
3174 /* Implementation of function hook 'sniffer' in
3175 'struct frame_unwind' for the epilogue unwinder. */
3176
3177 static int
3178 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3179 struct frame_info *this_frame,
3180 void **this_prologue_cache)
3181 {
3182 if (frame_relative_level (this_frame) == 0)
3183 {
3184 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3185 CORE_ADDR pc = get_frame_pc (this_frame);
3186
3187 if (arm_frame_is_thumb (this_frame))
3188 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3189 else
3190 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3191 }
3192 else
3193 return 0;
3194 }
3195
3196 /* Frame unwinder from epilogue. */
3197
3198 static const struct frame_unwind arm_epilogue_frame_unwind =
3199 {
3200 "arm epilogue",
3201 NORMAL_FRAME,
3202 default_frame_unwind_stop_reason,
3203 arm_epilogue_frame_this_id,
3204 arm_epilogue_frame_prev_register,
3205 NULL,
3206 arm_epilogue_frame_sniffer,
3207 };
3208
3209 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3210 trampoline, return the target PC. Otherwise return 0.
3211
3212 void call0a (char c, short s, int i, long l) {}
3213
3214 int main (void)
3215 {
3216 (*pointer_to_call0a) (c, s, i, l);
3217 }
3218
3219 Instead of calling a stub library function _call_via_xx (xx is
3220 the register name), GCC may inline the trampoline in the object
3221 file as below (register r2 has the address of call0a).
3222
3223 .global main
3224 .type main, %function
3225 ...
3226 bl .L1
3227 ...
3228 .size main, .-main
3229
3230 .L1:
3231 bx r2
3232
3233 The trampoline 'bx r2' doesn't belong to main. */
3234
3235 static CORE_ADDR
3236 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
3237 {
3238 /* The heuristic for recognizing such a trampoline is that FRAME is
3239 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3240 if (arm_frame_is_thumb (frame))
3241 {
3242 gdb_byte buf[2];
3243
3244 if (target_read_memory (pc, buf, 2) == 0)
3245 {
3246 struct gdbarch *gdbarch = get_frame_arch (frame);
3247 enum bfd_endian byte_order_for_code
3248 = gdbarch_byte_order_for_code (gdbarch);
3249 uint16_t insn
3250 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3251
3252 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3253 {
3254 CORE_ADDR dest
3255 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3256
3257 /* Clear the LSB so that gdb core sets step-resume
3258 breakpoint at the right address. */
3259 return UNMAKE_THUMB_ADDR (dest);
3260 }
3261 }
3262 }
3263
3264 return 0;
3265 }
3266
3267 static struct arm_prologue_cache *
3268 arm_make_stub_cache (struct frame_info *this_frame)
3269 {
3270 struct arm_prologue_cache *cache;
3271
3272 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3273 arm_cache_init (cache, this_frame);
3274
3275 arm_gdbarch_tdep *tdep
3276 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3277 arm_cache_set_active_sp_value (cache, tdep,
3278 get_frame_register_unsigned (this_frame,
3279 ARM_SP_REGNUM));
3280
3281 return cache;
3282 }
3283
3284 /* Our frame ID for a stub frame is the current SP and LR. */
3285
3286 static void
3287 arm_stub_this_id (struct frame_info *this_frame,
3288 void **this_cache,
3289 struct frame_id *this_id)
3290 {
3291 struct arm_prologue_cache *cache;
3292
3293 if (*this_cache == NULL)
3294 *this_cache = arm_make_stub_cache (this_frame);
3295 cache = (struct arm_prologue_cache *) *this_cache;
3296
3297 arm_gdbarch_tdep *tdep
3298 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3299 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3300 get_frame_pc (this_frame));
3301 }
3302
3303 static int
3304 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3305 struct frame_info *this_frame,
3306 void **this_prologue_cache)
3307 {
3308 CORE_ADDR addr_in_block;
3309 gdb_byte dummy[4];
3310 CORE_ADDR pc, start_addr;
3311 const char *name;
3312
3313 addr_in_block = get_frame_address_in_block (this_frame);
3314 pc = get_frame_pc (this_frame);
3315 if (in_plt_section (addr_in_block)
3316 /* We also use the stub unwinder if the target memory is unreadable
3317 to avoid having the prologue unwinder trying to read it. */
3318 || target_read_memory (pc, dummy, 4) != 0)
3319 return 1;
3320
3321 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3322 && arm_skip_bx_reg (this_frame, pc) != 0)
3323 return 1;
3324
3325 return 0;
3326 }
3327
3328 struct frame_unwind arm_stub_unwind = {
3329 "arm stub",
3330 NORMAL_FRAME,
3331 default_frame_unwind_stop_reason,
3332 arm_stub_this_id,
3333 arm_prologue_prev_register,
3334 NULL,
3335 arm_stub_unwind_sniffer
3336 };
3337
3338 /* Put here the code to store, into CACHE->saved_regs, the addresses
3339 of the saved registers of frame described by THIS_FRAME. CACHE is
3340 returned. */
3341
3342 static struct arm_prologue_cache *
3343 arm_m_exception_cache (struct frame_info *this_frame)
3344 {
3345 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3346 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3347 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3348 struct arm_prologue_cache *cache;
3349 CORE_ADDR lr;
3350 CORE_ADDR sp;
3351 CORE_ADDR unwound_sp;
3352 uint32_t sp_r0_offset = 0;
3353 LONGEST xpsr;
3354 uint32_t exc_return;
3355 bool fnc_return;
3356 uint32_t extended_frame_used;
3357 bool secure_stack_used = false;
3358 bool default_callee_register_stacking = false;
3359 bool exception_domain_is_secure = false;
3360
3361 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3362 arm_cache_init (cache, this_frame);
3363
3364 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3365 describes which bits in LR define which stack was used prior to the
3366 exception and whether the FPU was in use (causing an extended stack frame). */
3367
3368 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3369 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3370
3371 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3372 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3373 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3374 reset if Main Extension is implemented, otherwise the value is unknown. */
3375 if (lr == 0xffffffff)
3376 {
3377 /* Terminate any further stack unwinding by referring to self. */
3378 arm_cache_set_active_sp_value (cache, tdep, sp);
3379 return cache;
3380 }
3381
3382 fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3383 if (tdep->have_sec_ext && fnc_return)
3384 {
3385 if (!arm_unwind_secure_frames)
3386 {
3387 warning (_("Non-secure to secure stack unwinding disabled."));
3388
3389 /* Terminate any further stack unwinding by referring to self. */
3390 arm_cache_set_active_sp_value (cache, tdep, sp);
3391 return cache;
3392 }
3393
3394 xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
3395 if ((xpsr & 0xff) != 0)
3396 /* Handler mode: This is the mode that exceptions are handled in. */
3397 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3398 else
3399 /* Thread mode: This is the normal mode that programs run in. */
3400 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3401
3402 unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3403
3404 /* Stack layout for a function call from Secure to Non-Secure state
3405 (ARMv8-M section B3.16):
3406
3407 SP Offset
3408
3409 +-------------------+
3410 0x08 | |
3411 +-------------------+ <-- Original SP
3412 0x04 | Partial xPSR |
3413 +-------------------+
3414 0x00 | Return Address |
3415 +===================+ <-- New SP */
3416
3417 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3418 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3419 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3420
3421 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3422
3423 return cache;
3424 }
3425
3426 /* Check EXC_RETURN indicator bits (24-31). */
3427 exc_return = (((lr >> 24) & 0xff) == 0xff);
3428 if (exc_return)
3429 {
3430 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3431 bool process_stack_used = ((lr & (1 << 2)) != 0);
3432
3433 if (tdep->have_sec_ext)
3434 {
3435 secure_stack_used = ((lr & (1 << 6)) != 0);
3436 default_callee_register_stacking = ((lr & (1 << 5)) != 0);
3437 exception_domain_is_secure = ((lr & (1 << 0)) == 0);
3438
3439 /* Unwinding from non-secure to secure can trip security
3440 measures. In order to avoid the debugger being
3441 intrusive, rely on the user to configure the requested
3442 mode. */
3443 if (secure_stack_used && !exception_domain_is_secure
3444 && !arm_unwind_secure_frames)
3445 {
3446 warning (_("Non-secure to secure stack unwinding disabled."));
3447
3448 /* Terminate any further stack unwinding by referring to self. */
3449 arm_cache_set_active_sp_value (cache, tdep, sp);
3450 return cache;
3451 }
3452
3453 if (process_stack_used)
3454 {
3455 if (secure_stack_used)
3456 /* Secure thread (process) stack used, use PSP_S as SP. */
3457 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3458 else
3459 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3460 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_ns_regnum);
3461 }
3462 else
3463 {
3464 if (secure_stack_used)
3465 /* Secure main stack used, use MSP_S as SP. */
3466 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3467 else
3468 /* Non-secure main stack used, use MSP_NS as SP. */
3469 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_ns_regnum);
3470 }
3471 }
3472 else
3473 {
3474 if (process_stack_used)
3475 /* Thread (process) stack used, use PSP as SP. */
3476 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_regnum);
3477 else
3478 /* Main stack used, use MSP as SP. */
3479 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_regnum);
3480 }
3481 }
3482
3483 /* Fetch the SP to use for this frame. */
3484 unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3485
3486 /* Exception entry context stacking is described in the ARMv8-M (section B3.19)
3487 and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference Manuals.
3488
3489 The following figure shows the structure of the stack frame when Security
3490 and Floating-point extensions are present.
3491
3492 SP Offsets
3493 Without With
3494 Callee Regs Callee Regs
3495 (Secure -> Non-Secure)
3496 +-------------------+
3497 0xA8 | | 0xD0
3498 +===================+ --+ <-- Original SP
3499 0xA4 | S31 | 0xCC |
3500 +-------------------+ |
3501 ... | Additional FP context
3502 +-------------------+ |
3503 0x68 | S16 | 0x90 |
3504 +===================+ --+
3505 0x64 | Reserved | 0x8C |
3506 +-------------------+ |
3507 0x60 | FPSCR | 0x88 |
3508 +-------------------+ |
3509 0x5C | S15 | 0x84 | FP context
3510 +-------------------+ |
3511 ... |
3512 +-------------------+ |
3513 0x20 | S0 | 0x48 |
3514 +===================+ --+
3515 0x1C | xPSR | 0x44 |
3516 +-------------------+ |
3517 0x18 | Return address | 0x40 |
3518 +-------------------+ |
3519 0x14 | LR(R14) | 0x3C |
3520 +-------------------+ |
3521 0x10 | R12 | 0x38 | State context
3522 +-------------------+ |
3523 0x0C | R3 | 0x34 |
3524 +-------------------+ |
3525 ... |
3526 +-------------------+ |
3527 0x00 | R0 | 0x28 |
3528 +===================+ --+
3529 | R11 | 0x24 |
3530 +-------------------+ |
3531 ... |
3532 +-------------------+ | Additional state context
3533 | R4 | 0x08 | when transitioning from
3534 +-------------------+ | Secure to Non-Secure
3535 | Reserved | 0x04 |
3536 +-------------------+ |
3537 | Magic signature | 0x00 |
3538 +===================+ --+ <-- New SP */
3539
3540 /* With the Security extension, the hardware saves R4..R11 too. */
3541 if (exc_return && tdep->have_sec_ext && secure_stack_used
3542 && (!default_callee_register_stacking || exception_domain_is_secure))
3543 {
3544 /* Record the stack slots of the callee-saved registers R4..R11. */
3545 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3546 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3547 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3548 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3549 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3550 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3551 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3552 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3553 sp_r0_offset = 0x28;
3554 }
3555
3556 /* The hardware saves eight 32-bit words, comprising xPSR,
3557 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3558 "B1.5.6 Exception entry behavior" in
3559 "ARMv7-M Architecture Reference Manual". */
3560 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3561 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3562 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3563 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3564 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x10);
3565 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x14);
3566 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x18);
3567 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x1C);
3568
3569 /* Check the EXC_RETURN FTYPE bit to determine whether the extended
3570 stack frame type (FPU registers stored) was used. */
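  /* For instance, an EXC_RETURN value of 0xFFFFFFE9 has bit 4 (FTYPE) clear,
     indicating the extended frame with FP state, while 0xFFFFFFF9 has it set,
     indicating the standard frame (illustrative values taken from the ARMv7-M
     manual's EXC_RETURN definitions). */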
3571 extended_frame_used = ((lr & (1 << 4)) == 0);
3572 if (exc_return && extended_frame_used)
3573 {
3574 int i;
3575 int fpu_regs_stack_offset;
3576
3577 /* This code does not take lazy stacking into account; see "Lazy
3578 context save of FP state" in B1.5.7 and ARM AN298, as supported
3579 by the Cortex-M4F architecture.
3580 To handle this fully, the FPCCR register (Floating-point Context
3581 Control Register) would need to be read and its ASPEN and LSPEN
3582 bits checked in order to set up the lazily stacked FP registers
3583 correctly. This register is located at address 0xE000EF34. */
3584
3585 /* Extended stack frame type used. */
3586 fpu_regs_stack_offset = unwound_sp + sp_r0_offset + 0x20;
3587 for (i = 0; i < 8; i++)
3588 {
3589 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3590 fpu_regs_stack_offset += 8;
3591 }
3592 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + sp_r0_offset + 0x60);
3593 fpu_regs_stack_offset += 4;
3594
3595 if (tdep->have_sec_ext && !default_callee_register_stacking)
3596 {
3597 /* Handle floating-point callee saved registers. */
3598 fpu_regs_stack_offset = unwound_sp + sp_r0_offset + 0x68;
3599 for (i = 8; i < 16; i++)
3600 {
3601 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3602 fpu_regs_stack_offset += 8;
3603 }
3604
3605 arm_cache_set_active_sp_value (cache, tdep,
3606 unwound_sp + sp_r0_offset + 0xA8);
3607 }
3608 else
3609 {
3610 /* Offset 0x64 is reserved. */
3611 arm_cache_set_active_sp_value (cache, tdep,
3612 unwound_sp + sp_r0_offset + 0x68);
3613 }
3614 }
3615 else
3616 {
3617 /* Standard stack frame type used. */
3618 arm_cache_set_active_sp_value (cache, tdep,
3619 unwound_sp + sp_r0_offset + 0x20);
3620 }
3621
3622 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3623 aligner between the top of the 32-byte stack frame and the
3624 previous context's stack pointer. */
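  /* For example (illustrative), if the SP was only 4-byte aligned at exception
     entry, the hardware pushes a 4-byte aligner, sets bit 9 of the stacked
     xPSR, and the adjustment below adds those 4 bytes back. */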
3625 if (safe_read_memory_integer (unwound_sp + sp_r0_offset + 0x1C, 4,
3626 byte_order, &xpsr)
3627 && (xpsr & (1 << 9)) != 0)
3628 arm_cache_set_active_sp_value (cache, tdep,
3629 arm_cache_get_prev_sp_value (cache, tdep) + 4);
3630
3631 return cache;
3632 }
3633
3634 /* Implementation of function hook 'this_id' in
3635 'struct frame_unwind'. */
3636
3637 static void
3638 arm_m_exception_this_id (struct frame_info *this_frame,
3639 void **this_cache,
3640 struct frame_id *this_id)
3641 {
3642 struct arm_prologue_cache *cache;
3643
3644 if (*this_cache == NULL)
3645 *this_cache = arm_m_exception_cache (this_frame);
3646 cache = (struct arm_prologue_cache *) *this_cache;
3647
3648 /* Our frame ID is the unwound (previous) SP and the PC of this frame. */
3649 arm_gdbarch_tdep *tdep
3650 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3651 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3652 get_frame_pc (this_frame));
3653 }
3654
3655 /* Implementation of function hook 'prev_register' in
3656 'struct frame_unwind'. */
3657
3658 static struct value *
3659 arm_m_exception_prev_register (struct frame_info *this_frame,
3660 void **this_cache,
3661 int prev_regnum)
3662 {
3663 struct arm_prologue_cache *cache;
3664 CORE_ADDR sp_value;
3665
3666 if (*this_cache == NULL)
3667 *this_cache = arm_m_exception_cache (this_frame);
3668 cache = (struct arm_prologue_cache *) *this_cache;
3669
3670 /* The value was already reconstructed into PREV_SP. */
3671 arm_gdbarch_tdep *tdep
3672 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3673 if (prev_regnum == ARM_SP_REGNUM)
3674 return frame_unwind_got_constant (this_frame, prev_regnum,
3675 arm_cache_get_prev_sp_value (cache, tdep));
3676
3677 /* If we are asked to unwind the PC, strip the saved T bit. */
3678 if (prev_regnum == ARM_PC_REGNUM)
3679 {
3680 struct value *value = trad_frame_get_prev_register (this_frame,
3681 cache->saved_regs,
3682 prev_regnum);
3683 CORE_ADDR pc = value_as_address (value);
3684 return frame_unwind_got_constant (this_frame, prev_regnum,
3685 UNMAKE_THUMB_ADDR (pc));
3686 }
3687
3688 /* The value might be one of the alternative SPs; if so, use the
3689 value already constructed. */
3690 if (arm_cache_is_sp_register (cache, tdep, prev_regnum))
3691 {
3692 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3693 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3694 }
3695
3696 /* If we are asked to unwind the xPSR, set the T bit if the PC is in Thumb
3697 mode. The LR register is unreliable here, as it contains an FNC_RETURN
3698 or EXC_RETURN pattern. */
3699 if (prev_regnum == ARM_PS_REGNUM)
3700 {
3701 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3702 struct value *value = trad_frame_get_prev_register (this_frame,
3703 cache->saved_regs,
3704 ARM_PC_REGNUM);
3705 CORE_ADDR pc = value_as_address (value);
3706 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3707 ARM_PS_REGNUM);
3708 ULONGEST xpsr = value_as_long (value);
3709
3710 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3711 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3712 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3713 }
3714
3715 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3716 prev_regnum);
3717 }
3718
3719 /* Implementation of function hook 'sniffer' in
3720 'struct frame_unwind'. */
3721
3722 static int
3723 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3724 struct frame_info *this_frame,
3725 void **this_prologue_cache)
3726 {
3727 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3728 CORE_ADDR this_pc = get_frame_pc (this_frame);
3729
3730 /* No need to check is_m; this sniffer is only registered for
3731 M-profile architectures. */
3732
3733 /* Check if exception frame returns to a magic PC value. */
3734 return arm_m_addr_is_magic (gdbarch, this_pc);
3735 }
3736
3737 /* Frame unwinder for M-profile exceptions. */
3738
3739 struct frame_unwind arm_m_exception_unwind =
3740 {
3741 "arm m exception",
3742 SIGTRAMP_FRAME,
3743 default_frame_unwind_stop_reason,
3744 arm_m_exception_this_id,
3745 arm_m_exception_prev_register,
3746 NULL,
3747 arm_m_exception_unwind_sniffer
3748 };
3749
3750 static CORE_ADDR
3751 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3752 {
3753 struct arm_prologue_cache *cache;
3754
3755 if (*this_cache == NULL)
3756 *this_cache = arm_make_prologue_cache (this_frame);
3757 cache = (struct arm_prologue_cache *) *this_cache;
3758
3759 arm_gdbarch_tdep *tdep
3760 = (arm_gdbarch_tdep *) gdbarch_tdep (get_frame_arch (this_frame));
3761 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3762 }
3763
3764 struct frame_base arm_normal_base = {
3765 &arm_prologue_unwind,
3766 arm_normal_frame_base,
3767 arm_normal_frame_base,
3768 arm_normal_frame_base
3769 };
3770
3771 static struct value *
3772 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3773 int regnum)
3774 {
3775 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3776 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3777 CORE_ADDR lr;
3778 ULONGEST cpsr;
3779
3780 switch (regnum)
3781 {
3782 case ARM_PC_REGNUM:
3783 /* The PC is normally copied from the return column, which
3784 describes saves of LR. However, that version may have an
3785 extra bit set to indicate Thumb state. The bit is not
3786 part of the PC. */
3787
3788 /* Record in the frame whether the return address was signed. */
3789 if (tdep->have_pacbti)
3790 {
3791 CORE_ADDR ra_auth_code
3792 = frame_unwind_register_unsigned (this_frame,
3793 tdep->pacbti_pseudo_base);
3794
3795 if (ra_auth_code != 0)
3796 set_frame_previous_pc_masked (this_frame);
3797 }
3798
3799 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3800 return frame_unwind_got_constant (this_frame, regnum,
3801 arm_addr_bits_remove (gdbarch, lr));
3802
3803 case ARM_PS_REGNUM:
3804 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3805 cpsr = get_frame_register_unsigned (this_frame, regnum);
3806 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3807 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
3808 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3809
3810 default:
3811 internal_error (__FILE__, __LINE__,
3812 _("Unexpected register %d"), regnum);
3813 }
3814 }
3815
3816 /* Implement the stack_frame_destroyed_p gdbarch method. */
3817
3818 static int
3819 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3820 {
3821 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3822 unsigned int insn, insn2;
3823 int found_return = 0, found_stack_adjust = 0;
3824 CORE_ADDR func_start, func_end;
3825 CORE_ADDR scan_pc;
3826 gdb_byte buf[4];
3827
3828 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3829 return 0;
3830
3831 /* The epilogue is a sequence of instructions along the following lines:
3832
3833 - add stack frame size to SP or FP
3834 - [if frame pointer used] restore SP from FP
3835 - restore registers from SP [may include PC]
3836 - a return-type instruction [if PC wasn't already restored]
3837
3838 In a first pass, we scan forward from the current PC and verify the
3839 instructions we find as compatible with this sequence, ending in a
3840 return instruction.
3841
3842 However, this is not sufficient to distinguish indirect function calls
3843 within a function from indirect tail calls in the epilogue in some cases.
3844 Therefore, if we didn't already find any SP-changing instruction during
3845 forward scan, we add a backward scanning heuristic to ensure we actually
3846 are in the epilogue. */
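  /* For example, a typical Thumb epilogue ends with "pop {r4, r7, pc}"
     (encoded 0xbd90), which the forward scan below treats as a return
     (illustrative encoding). */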
3847
3848 scan_pc = pc;
3849 while (scan_pc < func_end && !found_return)
3850 {
3851 if (target_read_memory (scan_pc, buf, 2))
3852 break;
3853
3854 scan_pc += 2;
3855 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3856
3857 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3858 found_return = 1;
3859 else if (insn == 0x46f7) /* mov pc, lr */
3860 found_return = 1;
3861 else if (thumb_instruction_restores_sp (insn))
3862 {
3863 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3864 found_return = 1;
3865 }
3866 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3867 {
3868 if (target_read_memory (scan_pc, buf, 2))
3869 break;
3870
3871 scan_pc += 2;
3872 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3873
3874 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3875 {
3876 if (insn2 & 0x8000) /* <registers> include PC. */
3877 found_return = 1;
3878 }
3879 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3880 && (insn2 & 0x0fff) == 0x0b04)
3881 {
3882 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3883 found_return = 1;
3884 }
3885 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3886 && (insn2 & 0x0e00) == 0x0a00)
3887 ;
3888 else
3889 break;
3890 }
3891 else
3892 break;
3893 }
3894
3895 if (!found_return)
3896 return 0;
3897
3898 /* Since any instruction in the epilogue sequence, with the possible
3899 exception of return itself, updates the stack pointer, we need to
3900 scan backwards for at most one instruction. Try either a 16-bit or
3901 a 32-bit instruction. This is just a heuristic, so we do not worry
3902 too much about false positives. */
3903
3904 if (pc - 4 < func_start)
3905 return 0;
3906 if (target_read_memory (pc - 4, buf, 4))
3907 return 0;
3908
3909 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3910 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3911
3912 if (thumb_instruction_restores_sp (insn2))
3913 found_stack_adjust = 1;
3914 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3915 found_stack_adjust = 1;
3916 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3917 && (insn2 & 0x0fff) == 0x0b04)
3918 found_stack_adjust = 1;
3919 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3920 && (insn2 & 0x0e00) == 0x0a00)
3921 found_stack_adjust = 1;
3922
3923 return found_stack_adjust;
3924 }
3925
3926 static int
3927 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3928 {
3929 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3930 unsigned int insn;
3931 int found_return;
3932 CORE_ADDR func_start, func_end;
3933
3934 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3935 return 0;
3936
3937 /* We are in the epilogue if the previous instruction was a stack
3938 adjustment and the next instruction is a possible return (bx, mov
3939 pc, or pop). We could have to scan backwards to find the stack
3940 adjustment, or forwards to find the return, but this is a decent
3941 approximation. First scan forwards. */
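  /* For example, an ARM epilogue commonly ends in "bx lr" (0xe12fff1e),
     which matches the BX pattern tested below (illustrative encoding). */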
3942
3943 found_return = 0;
3944 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3945 if (bits (insn, 28, 31) != INST_NV)
3946 {
3947 if ((insn & 0x0ffffff0) == 0x012fff10)
3948 /* BX. */
3949 found_return = 1;
3950 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3951 /* MOV PC. */
3952 found_return = 1;
3953 else if ((insn & 0x0fff0000) == 0x08bd0000
3954 && (insn & 0x0000c000) != 0)
3955 /* POP (LDMIA), including PC or LR. */
3956 found_return = 1;
3957 }
3958
3959 if (!found_return)
3960 return 0;
3961
3962 /* Scan backwards. This is just a heuristic, so do not worry about
3963 false positives from mode changes. */
3964
3965 if (pc < func_start + 4)
3966 return 0;
3967
3968 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3969 if (arm_instruction_restores_sp (insn))
3970 return 1;
3971
3972 return 0;
3973 }
3974
3975 /* Implement the stack_frame_destroyed_p gdbarch method. */
3976
3977 static int
3978 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3979 {
3980 if (arm_pc_is_thumb (gdbarch, pc))
3981 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3982 else
3983 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3984 }
3985
3986 /* When arguments must be pushed onto the stack, they go on in reverse
3987 order. The code below implements a FILO (stack) to do this. */
3988
3989 struct arm_stack_item
3990 {
3991 int len;
3992 struct arm_stack_item *prev;
3993 gdb_byte *data;
3994 };
3995
3996 static struct arm_stack_item *
3997 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
3998 int len)
3999 {
4000 struct arm_stack_item *si;
4001 si = XNEW (struct arm_stack_item);
4002 si->data = (gdb_byte *) xmalloc (len);
4003 si->len = len;
4004 si->prev = prev;
4005 memcpy (si->data, contents, len);
4006 return si;
4007 }
4008
4009 static struct arm_stack_item *
4010 pop_stack_item (struct arm_stack_item *si)
4011 {
4012 struct arm_stack_item *dead = si;
4013 si = si->prev;
4014 xfree (dead->data);
4015 xfree (dead);
4016 return si;
4017 }
4018
4019 /* Implement the gdbarch type alignment method. This overrides the generic
4020 alignment algorithm for anything that is ARM-specific. */
4021
4022 static ULONGEST
4023 arm_type_align (gdbarch *gdbarch, struct type *t)
4024 {
4025 t = check_typedef (t);
4026 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4027 {
4028 /* Use the natural alignment for vector types (the same as for
4029 scalar types), but cap the alignment at 64 bits. */
4030 if (TYPE_LENGTH (t) > 8)
4031 return 8;
4032 else
4033 return TYPE_LENGTH (t);
4034 }
4035
4036 /* Allow the common code to calculate the alignment. */
4037 return 0;
4038 }
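/* For example (illustrative), a 16-byte Neon vector type is capped at 8-byte
   alignment by arm_type_align above, whereas an ordinary struct falls through
   to the generic alignment code via the return of 0. */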
4039
4040 /* Possible base types for a candidate for passing and returning in
4041 VFP registers. */
4042
4043 enum arm_vfp_cprc_base_type
4044 {
4045 VFP_CPRC_UNKNOWN,
4046 VFP_CPRC_SINGLE,
4047 VFP_CPRC_DOUBLE,
4048 VFP_CPRC_VEC64,
4049 VFP_CPRC_VEC128
4050 };
4051
4052 /* The length of one element of base type B. */
4053
4054 static unsigned
4055 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4056 {
4057 switch (b)
4058 {
4059 case VFP_CPRC_SINGLE:
4060 return 4;
4061 case VFP_CPRC_DOUBLE:
4062 return 8;
4063 case VFP_CPRC_VEC64:
4064 return 8;
4065 case VFP_CPRC_VEC128:
4066 return 16;
4067 default:
4068 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
4069 (int) b);
4070 }
4071 }
4072
4073 /* The character ('s', 'd' or 'q') for the type of VFP register used
4074 for passing base type B. */
4075
4076 static int
4077 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4078 {
4079 switch (b)
4080 {
4081 case VFP_CPRC_SINGLE:
4082 return 's';
4083 case VFP_CPRC_DOUBLE:
4084 return 'd';
4085 case VFP_CPRC_VEC64:
4086 return 'd';
4087 case VFP_CPRC_VEC128:
4088 return 'q';
4089 default:
4090 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
4091 (int) b);
4092 }
4093 }
4094
4095 /* Determine whether T may be part of a candidate for passing and
4096 returning in VFP registers, ignoring the limit on the total number
4097 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4098 classification of the first valid component found; if it is not
4099 VFP_CPRC_UNKNOWN, all components must have the same classification
4100 as *BASE_TYPE. If it is found that T contains a type not permitted
4101 for passing and returning in VFP registers, a type differently
4102 classified from *BASE_TYPE, or two types differently classified
4103 from each other, return -1, otherwise return the total number of
4104 base-type elements found (possibly 0 in an empty structure or
4105 array). Vector types are not currently supported, matching the
4106 generic AAPCS support. */
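/* For example (illustrative): struct { float x; float y; } classifies as two
   VFP_CPRC_SINGLE elements, whereas struct { float f; double d; } yields -1
   because its members classify differently. */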
4107
4108 static int
4109 arm_vfp_cprc_sub_candidate (struct type *t,
4110 enum arm_vfp_cprc_base_type *base_type)
4111 {
4112 t = check_typedef (t);
4113 switch (t->code ())
4114 {
4115 case TYPE_CODE_FLT:
4116 switch (TYPE_LENGTH (t))
4117 {
4118 case 4:
4119 if (*base_type == VFP_CPRC_UNKNOWN)
4120 *base_type = VFP_CPRC_SINGLE;
4121 else if (*base_type != VFP_CPRC_SINGLE)
4122 return -1;
4123 return 1;
4124
4125 case 8:
4126 if (*base_type == VFP_CPRC_UNKNOWN)
4127 *base_type = VFP_CPRC_DOUBLE;
4128 else if (*base_type != VFP_CPRC_DOUBLE)
4129 return -1;
4130 return 1;
4131
4132 default:
4133 return -1;
4134 }
4135 break;
4136
4137 case TYPE_CODE_COMPLEX:
4138 /* Arguments of complex T where T is one of the types float or
4139 double get treated as if they are implemented as:
4140
4141 struct complexT
4142 {
4143 T real;
4144 T imag;
4145 };
4146
4147 */
4148 switch (TYPE_LENGTH (t))
4149 {
4150 case 8:
4151 if (*base_type == VFP_CPRC_UNKNOWN)
4152 *base_type = VFP_CPRC_SINGLE;
4153 else if (*base_type != VFP_CPRC_SINGLE)
4154 return -1;
4155 return 2;
4156
4157 case 16:
4158 if (*base_type == VFP_CPRC_UNKNOWN)
4159 *base_type = VFP_CPRC_DOUBLE;
4160 else if (*base_type != VFP_CPRC_DOUBLE)
4161 return -1;
4162 return 2;
4163
4164 default:
4165 return -1;
4166 }
4167 break;
4168
4169 case TYPE_CODE_ARRAY:
4170 {
4171 if (t->is_vector ())
4172 {
4173 /* 64-bit and 128-bit containerized vector types are VFP
4174 CPRCs. */
4175 switch (TYPE_LENGTH (t))
4176 {
4177 case 8:
4178 if (*base_type == VFP_CPRC_UNKNOWN)
4179 *base_type = VFP_CPRC_VEC64;
4180 return 1;
4181 case 16:
4182 if (*base_type == VFP_CPRC_UNKNOWN)
4183 *base_type = VFP_CPRC_VEC128;
4184 return 1;
4185 default:
4186 return -1;
4187 }
4188 }
4189 else
4190 {
4191 int count;
4192 unsigned unitlen;
4193
4194 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
4195 base_type);
4196 if (count == -1)
4197 return -1;
4198 if (TYPE_LENGTH (t) == 0)
4199 {
4200 gdb_assert (count == 0);
4201 return 0;
4202 }
4203 else if (count == 0)
4204 return -1;
4205 unitlen = arm_vfp_cprc_unit_length (*base_type);
4206 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
4207 return TYPE_LENGTH (t) / unitlen;
4208 }
4209 }
4210 break;
4211
4212 case TYPE_CODE_STRUCT:
4213 {
4214 int count = 0;
4215 unsigned unitlen;
4216 int i;
4217 for (i = 0; i < t->num_fields (); i++)
4218 {
4219 int sub_count = 0;
4220
4221 if (!field_is_static (&t->field (i)))
4222 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4223 base_type);
4224 if (sub_count == -1)
4225 return -1;
4226 count += sub_count;
4227 }
4228 if (TYPE_LENGTH (t) == 0)
4229 {
4230 gdb_assert (count == 0);
4231 return 0;
4232 }
4233 else if (count == 0)
4234 return -1;
4235 unitlen = arm_vfp_cprc_unit_length (*base_type);
4236 if (TYPE_LENGTH (t) != unitlen * count)
4237 return -1;
4238 return count;
4239 }
4240
4241 case TYPE_CODE_UNION:
4242 {
4243 int count = 0;
4244 unsigned unitlen;
4245 int i;
4246 for (i = 0; i < t->num_fields (); i++)
4247 {
4248 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4249 base_type);
4250 if (sub_count == -1)
4251 return -1;
4252 count = (count > sub_count ? count : sub_count);
4253 }
4254 if (TYPE_LENGTH (t) == 0)
4255 {
4256 gdb_assert (count == 0);
4257 return 0;
4258 }
4259 else if (count == 0)
4260 return -1;
4261 unitlen = arm_vfp_cprc_unit_length (*base_type);
4262 if (TYPE_LENGTH (t) != unitlen * count)
4263 return -1;
4264 return count;
4265 }
4266
4267 default:
4268 break;
4269 }
4270
4271 return -1;
4272 }
4273
4274 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4275 if passed to or returned from a non-variadic function with the VFP
4276 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4277 *BASE_TYPE to the base type for T and *COUNT to the number of
4278 elements of that base type before returning. */
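/* Illustratively, a struct holding four doubles is a CPRC (base type
   VFP_CPRC_DOUBLE, count 4), while one holding five floats is not, since its
   element count exceeds 4. */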
4279
4280 static int
4281 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4282 int *count)
4283 {
4284 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4285 int c = arm_vfp_cprc_sub_candidate (t, &b);
4286 if (c <= 0 || c > 4)
4287 return 0;
4288 *base_type = b;
4289 *count = c;
4290 return 1;
4291 }
4292
4293 /* Return 1 if the VFP ABI should be used for passing arguments to and
4294 returning values from a function of type FUNC_TYPE, 0
4295 otherwise. */
4296
4297 static int
4298 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4299 {
4300 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4301
4302 /* Variadic functions always use the base ABI. Assume that functions
4303 without debug info are not variadic. */
4304 if (func_type && check_typedef (func_type)->has_varargs ())
4305 return 0;
4306
4307 /* The VFP ABI is only supported as a variant of AAPCS. */
4308 if (tdep->arm_abi != ARM_ABI_AAPCS)
4309 return 0;
4310
4311 return tdep->fp_model == ARM_FLOAT_VFP;
4312 }
4313
4314 /* We currently support passing parameters in integer registers, which
4315 conforms to GCC's default model, and VFP argument passing following
4316 the VFP variant of the AAPCS. Several other variants exist, and
4317 we should probably support some of them based on the selected ABI. */
4318
4319 static CORE_ADDR
4320 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4321 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4322 struct value **args, CORE_ADDR sp,
4323 function_call_return_method return_method,
4324 CORE_ADDR struct_addr)
4325 {
4326 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4327 int argnum;
4328 int argreg;
4329 int nstack;
4330 struct arm_stack_item *si = NULL;
4331 int use_vfp_abi;
4332 struct type *ftype;
4333 unsigned vfp_regs_free = (1 << 16) - 1;
4334 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4335
4336 /* Determine the type of this function and whether the VFP ABI
4337 applies. */
4338 ftype = check_typedef (value_type (function));
4339 if (ftype->code () == TYPE_CODE_PTR)
4340 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
4341 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4342
4343 /* Set the return address. For the ARM, the return breakpoint is
4344 always at BP_ADDR. */
4345 if (arm_pc_is_thumb (gdbarch, bp_addr))
4346 bp_addr |= 1;
4347 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4348
4349 /* Walk through the list of args and determine how large a temporary
4350 stack is required. Need to take care here as structs may be
4351 passed on the stack, and we have to push them. */
4352 nstack = 0;
4353
4354 argreg = ARM_A1_REGNUM;
4356
4357 /* The struct_return pointer occupies the first parameter
4358 passing register. */
4359 if (return_method == return_method_struct)
4360 {
4361 arm_debug_printf ("struct return in %s = %s",
4362 gdbarch_register_name (gdbarch, argreg),
4363 paddress (gdbarch, struct_addr));
4364
4365 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4366 argreg++;
4367 }
4368
4369 for (argnum = 0; argnum < nargs; argnum++)
4370 {
4371 int len;
4372 struct type *arg_type;
4373 struct type *target_type;
4374 enum type_code typecode;
4375 const bfd_byte *val;
4376 int align;
4377 enum arm_vfp_cprc_base_type vfp_base_type;
4378 int vfp_base_count;
4379 int may_use_core_reg = 1;
4380
4381 arg_type = check_typedef (value_type (args[argnum]));
4382 len = TYPE_LENGTH (arg_type);
4383 target_type = TYPE_TARGET_TYPE (arg_type);
4384 typecode = arg_type->code ();
4385 val = value_contents (args[argnum]).data ();
4386
4387 align = type_align (arg_type);
4388 /* Round alignment up to a whole number of words. */
4389 align = (align + ARM_INT_REGISTER_SIZE - 1)
4390 & ~(ARM_INT_REGISTER_SIZE - 1);
4391 /* Different ABIs have different maximum alignments. */
4392 if (tdep->arm_abi == ARM_ABI_APCS)
4393 {
4394 /* The APCS ABI only requires word alignment. */
4395 align = ARM_INT_REGISTER_SIZE;
4396 }
4397 else
4398 {
4399 /* The AAPCS requires at most doubleword alignment. */
4400 if (align > ARM_INT_REGISTER_SIZE * 2)
4401 align = ARM_INT_REGISTER_SIZE * 2;
4402 }
4403
4404 if (use_vfp_abi
4405 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4406 &vfp_base_count))
4407 {
4408 int regno;
4409 int unit_length;
4410 int shift;
4411 unsigned mask;
4412
4413 /* Because this is a CPRC it cannot go in a core register or
4414 cause a core register to be skipped for alignment.
4415 Either it goes in VFP registers and the rest of this loop
4416 iteration is skipped for this argument, or it goes on the
4417 stack (and the stack alignment code is correct for this
4418 case). */
4419 may_use_core_reg = 0;
4420
4421 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4422 shift = unit_length / 4;
4423 mask = (1 << (shift * vfp_base_count)) - 1;
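      /* E.g. for base type VFP_CPRC_DOUBLE, unit_length is 8 and shift is 2;
         a count of 2 then gives mask 0xf, i.e. four consecutive free
         S registers (two D registers) are required. */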
4424 for (regno = 0; regno < 16; regno += shift)
4425 if (((vfp_regs_free >> regno) & mask) == mask)
4426 break;
4427
4428 if (regno < 16)
4429 {
4430 int reg_char;
4431 int reg_scaled;
4432 int i;
4433
4434 vfp_regs_free &= ~(mask << regno);
4435 reg_scaled = regno / shift;
4436 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4437 for (i = 0; i < vfp_base_count; i++)
4438 {
4439 char name_buf[4];
4440 int regnum;
4441 if (reg_char == 'q')
4442 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4443 val + i * unit_length);
4444 else
4445 {
4446 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4447 reg_char, reg_scaled + i);
4448 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4449 strlen (name_buf));
4450 regcache->cooked_write (regnum, val + i * unit_length);
4451 }
4452 }
4453 continue;
4454 }
4455 else
4456 {
4457 /* This CPRC could not go in VFP registers, so all VFP
4458 registers are now marked as used. */
4459 vfp_regs_free = 0;
4460 }
4461 }
4462
4463 /* Push stack padding for doubleword alignment. */
4464 if (nstack & (align - 1))
4465 {
4466 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4467 nstack += ARM_INT_REGISTER_SIZE;
4468 }
4469
4470 /* Doubleword aligned quantities must go in even register pairs. */
4471 if (may_use_core_reg
4472 && argreg <= ARM_LAST_ARG_REGNUM
4473 && align > ARM_INT_REGISTER_SIZE
4474 && argreg & 1)
4475 argreg++;
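      /* Illustrative example: under the base (soft-float) AAPCS, a double
         that would otherwise have started in r1 is moved by the adjustment
         above so that it occupies the even/odd pair r2/r3. */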
4476
4477 /* If the argument is a pointer to a function, and it is a
4478 Thumb function, create a LOCAL copy of the value and set
4479 the THUMB bit in it. */
4480 if (TYPE_CODE_PTR == typecode
4481 && target_type != NULL
4482 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4483 {
4484 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4485 if (arm_pc_is_thumb (gdbarch, regval))
4486 {
4487 bfd_byte *copy = (bfd_byte *) alloca (len);
4488 store_unsigned_integer (copy, len, byte_order,
4489 MAKE_THUMB_ADDR (regval));
4490 val = copy;
4491 }
4492 }
4493
4494 /* Copy the argument to general registers or the stack in
4495 register-sized pieces. Large arguments are split between
4496 registers and stack. */
4497 while (len > 0)
4498 {
4499 int partial_len = len < ARM_INT_REGISTER_SIZE
4500 ? len : ARM_INT_REGISTER_SIZE;
4501 CORE_ADDR regval
4502 = extract_unsigned_integer (val, partial_len, byte_order);
4503
4504 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4505 {
4506 /* The argument is being passed in a general purpose
4507 register. */
4508 if (byte_order == BFD_ENDIAN_BIG)
4509 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4510
4511 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4512 gdbarch_register_name (gdbarch, argreg),
4513 phex (regval, ARM_INT_REGISTER_SIZE));
4514
4515 regcache_cooked_write_unsigned (regcache, argreg, regval);
4516 argreg++;
4517 }
4518 else
4519 {
4520 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4521
4522 memset (buf, 0, sizeof (buf));
4523 store_unsigned_integer (buf, partial_len, byte_order, regval);
4524
4525 /* Push the arguments onto the stack. */
4526 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4527 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4528 nstack += ARM_INT_REGISTER_SIZE;
4529 }
4530
4531 len -= partial_len;
4532 val += partial_len;
4533 }
4534 }
4535 /* If we have an odd number of words to push, then decrement the stack
4536 by one word now, so that the first stack argument will be dword aligned. */
4537 if (nstack & 4)
4538 sp -= 4;
4539
4540 while (si)
4541 {
4542 sp -= si->len;
4543 write_memory (sp, si->data, si->len);
4544 si = pop_stack_item (si);
4545 }
4546
4547 /* Finally, update the SP register. */
4548 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4549
4550 return sp;
4551 }
4552
4553
4554 /* Always align the frame to an 8-byte boundary. This is required on
4555 some platforms and harmless on the rest. */
4556
4557 static CORE_ADDR
4558 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4559 {
4560 /* Align the stack to eight bytes. */
4561 return sp & ~ (CORE_ADDR) 7;
4562 }
4563
4564 static void
4565 print_fpu_flags (struct ui_file *file, int flags)
4566 {
4567 if (flags & (1 << 0))
4568 gdb_puts ("IVO ", file);
4569 if (flags & (1 << 1))
4570 gdb_puts ("DVZ ", file);
4571 if (flags & (1 << 2))
4572 gdb_puts ("OFL ", file);
4573 if (flags & (1 << 3))
4574 gdb_puts ("UFL ", file);
4575 if (flags & (1 << 4))
4576 gdb_puts ("INX ", file);
4577 gdb_putc ('\n', file);
4578 }
4579
4580 /* Print interesting information about the floating point processor
4581 (if present) or emulator. */
4582 static void
4583 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4584 struct frame_info *frame, const char *args)
4585 {
4586 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4587 int type;
4588
4589 type = (status >> 24) & 127;
4590 if (status & (1 << 31))
4591 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4592 else
4593 gdb_printf (file, _("Software FPU type %d\n"), type);
4594 /* i18n: [floating point unit] mask */
4595 gdb_puts (_("mask: "), file);
4596 print_fpu_flags (file, status >> 16);
4597 /* i18n: [floating point unit] flags */
4598 gdb_puts (_("flags: "), file);
4599 print_fpu_flags (file, status);
4600 }
4601
4602 /* Construct the ARM extended floating point type. */
4603 static struct type *
4604 arm_ext_type (struct gdbarch *gdbarch)
4605 {
4606 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4607
4608 if (!tdep->arm_ext_type)
4609 tdep->arm_ext_type
4610 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4611 floatformats_arm_ext);
4612
4613 return tdep->arm_ext_type;
4614 }
4615
4616 static struct type *
4617 arm_neon_double_type (struct gdbarch *gdbarch)
4618 {
4619 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4620
4621 if (tdep->neon_double_type == NULL)
4622 {
4623 struct type *t, *elem;
4624
4625 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4626 TYPE_CODE_UNION);
4627 elem = builtin_type (gdbarch)->builtin_uint8;
4628 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4629 elem = builtin_type (gdbarch)->builtin_uint16;
4630 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4631 elem = builtin_type (gdbarch)->builtin_uint32;
4632 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4633 elem = builtin_type (gdbarch)->builtin_uint64;
4634 append_composite_type_field (t, "u64", elem);
4635 elem = builtin_type (gdbarch)->builtin_float;
4636 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4637 elem = builtin_type (gdbarch)->builtin_double;
4638 append_composite_type_field (t, "f64", elem);
4639
4640 t->set_is_vector (true);
4641 t->set_name ("neon_d");
4642 tdep->neon_double_type = t;
4643 }
4644
4645 return tdep->neon_double_type;
4646 }
4647
4648 /* FIXME: The vector types are not correctly ordered on big-endian
4649 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4650 bits of d0 - regardless of what unit size is being held in d0. So
4651 the offset of the first uint8 in d0 is 7, but the offset of the
4652 first float is 4. This code works as-is for little-endian
4653 targets. */
4654
4655 static struct type *
4656 arm_neon_quad_type (struct gdbarch *gdbarch)
4657 {
4658 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4659
4660 if (tdep->neon_quad_type == NULL)
4661 {
4662 struct type *t, *elem;
4663
4664 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4665 TYPE_CODE_UNION);
4666 elem = builtin_type (gdbarch)->builtin_uint8;
4667 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4668 elem = builtin_type (gdbarch)->builtin_uint16;
4669 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4670 elem = builtin_type (gdbarch)->builtin_uint32;
4671 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4672 elem = builtin_type (gdbarch)->builtin_uint64;
4673 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4674 elem = builtin_type (gdbarch)->builtin_float;
4675 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4676 elem = builtin_type (gdbarch)->builtin_double;
4677 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4678
4679 t->set_is_vector (true);
4680 t->set_name ("neon_q");
4681 tdep->neon_quad_type = t;
4682 }
4683
4684 return tdep->neon_quad_type;
4685 }
4686
4687 /* Return true if REGNUM is a Q pseudo register. Return false
4688 otherwise.
4689
4690 REGNUM is the raw register number and not a pseudo-relative register
4691 number. */
4692
4693 static bool
4694 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4695 {
4696 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4697
4698 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4699 MVE (Q0~Q7) features. */
4700 if (tdep->have_q_pseudos
4701 && regnum >= tdep->q_pseudo_base
4702 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4703 return true;
4704
4705 return false;
4706 }
4707
4708 /* Return true if REGNUM is a VFP S pseudo register. Return false
4709 otherwise.
4710
4711 REGNUM is the raw register number and not a pseudo-relative register
4712 number. */
4713
4714 static bool
4715 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4716 {
4717 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4718
4719 if (tdep->have_s_pseudos
4720 && regnum >= tdep->s_pseudo_base
4721 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4722 return true;
4723
4724 return false;
4725 }
4726
4727 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4728 otherwise.
4729
4730 REGNUM is the raw register number and not a pseudo-relative register
4731 number. */
4732
4733 static bool
4734 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
4735 {
4736 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4737
4738 if (tdep->have_mve
4739 && regnum >= tdep->mve_pseudo_base
4740 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
4741 return true;
4742
4743 return false;
4744 }
4745
4746 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
4747 false otherwise.
4748
4749 REGNUM is the raw register number and not a pseudo-relative register
4750 number. */
4751
4752 static bool
4753 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
4754 {
4755 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4756
4757 if (tdep->have_pacbti
4758 && regnum >= tdep->pacbti_pseudo_base
4759 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
4760 return true;
4761
4762 return false;
4763 }
4764
4765 /* Return the GDB type object for the "standard" data type of data in
4766 register N. */
4767
4768 static struct type *
4769 arm_register_type (struct gdbarch *gdbarch, int regnum)
4770 {
4771 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4772
4773 if (is_s_pseudo (gdbarch, regnum))
4774 return builtin_type (gdbarch)->builtin_float;
4775
4776 if (is_q_pseudo (gdbarch, regnum))
4777 return arm_neon_quad_type (gdbarch);
4778
4779 if (is_mve_pseudo (gdbarch, regnum))
4780 return builtin_type (gdbarch)->builtin_int16;
4781
4782 if (is_pacbti_pseudo (gdbarch, regnum))
4783 return builtin_type (gdbarch)->builtin_uint32;
4784
4785 /* If the target description has register information, we are only
4786 in this function so that we can override the types of
4787 double-precision registers for NEON. */
4788 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4789 {
4790 struct type *t = tdesc_register_type (gdbarch, regnum);
4791
4792 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4793 && t->code () == TYPE_CODE_FLT
4794 && tdep->have_neon)
4795 return arm_neon_double_type (gdbarch);
4796 else
4797 return t;
4798 }
4799
4800 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4801 {
4802 if (!tdep->have_fpa_registers)
4803 return builtin_type (gdbarch)->builtin_void;
4804
4805 return arm_ext_type (gdbarch);
4806 }
4807 else if (regnum == ARM_SP_REGNUM)
4808 return builtin_type (gdbarch)->builtin_data_ptr;
4809 else if (regnum == ARM_PC_REGNUM)
4810 return builtin_type (gdbarch)->builtin_func_ptr;
4811 else if (regnum >= ARRAY_SIZE (arm_register_names))
4812 /* These registers are only supported on targets which supply
4813 an XML description. */
4814 return builtin_type (gdbarch)->builtin_int0;
4815 else
4816 return builtin_type (gdbarch)->builtin_uint32;
4817 }
4818
4819 /* Map a DWARF register REGNUM onto the appropriate GDB register
4820 number. */
4821
4822 static int
4823 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4824 {
4825 /* Core integer regs. */
4826 if (reg >= 0 && reg <= 15)
4827 return reg;
4828
4829 /* Legacy FPA encoding. These were once used in a way which
4830 overlapped with VFP register numbering, so their use is
4831 discouraged, but GDB doesn't support the ARM toolchain
4832 which used them for VFP. */
4833 if (reg >= 16 && reg <= 23)
4834 return ARM_F0_REGNUM + reg - 16;
4835
4836 /* New assignments for the FPA registers. */
4837 if (reg >= 96 && reg <= 103)
4838 return ARM_F0_REGNUM + reg - 96;
4839
4840 /* WMMX register assignments. */
4841 if (reg >= 104 && reg <= 111)
4842 return ARM_WCGR0_REGNUM + reg - 104;
4843
4844 if (reg >= 112 && reg <= 127)
4845 return ARM_WR0_REGNUM + reg - 112;
4846
4847 /* PACBTI register containing the Pointer Authentication Code. */
4848 if (reg == ARM_DWARF_RA_AUTH_CODE)
4849 {
4850 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4851
4852 if (tdep->have_pacbti)
4853 return tdep->pacbti_pseudo_base;
4854
4855 return -1;
4856 }
4857
4858 if (reg >= 192 && reg <= 199)
4859 return ARM_WC0_REGNUM + reg - 192;
4860
4861 /* VFP v2 registers. A double precision value is actually
4862 in d1 rather than s2, but the ABI only defines numbering
4863 for the single precision registers. This will "just work"
4864 in GDB for little endian targets (we'll read eight bytes,
4865 starting in s0 and then progressing to s1), but will be
4866 reversed on big endian targets with VFP. This won't
4867 be a problem for the new Neon quad registers; you're supposed
4868 to use DW_OP_piece for those. */
4869 if (reg >= 64 && reg <= 95)
4870 {
4871 char name_buf[4];
4872
4873 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4874 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4875 strlen (name_buf));
4876 }
4877
4878 /* VFP v3 / Neon registers. This range is also used for VFP v2
4879 registers, except that it now describes d0 instead of s0. */
4880 if (reg >= 256 && reg <= 287)
4881 {
4882 char name_buf[4];
4883
4884 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4885 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4886 strlen (name_buf));
4887 }
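  /* For instance, DWARF register 65 maps to "s1" via the first block above,
     while DWARF register 257 maps to "d1". */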
4888
4889 return -1;
4890 }
4891
4892 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4893 static int
4894 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4895 {
4896 int reg = regnum;
4897 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4898
4899 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4900 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4901
4902 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4903 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4904
4905 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4906 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4907
4908 if (reg < NUM_GREGS)
4909 return SIM_ARM_R0_REGNUM + reg;
4910 reg -= NUM_GREGS;
4911
4912 if (reg < NUM_FREGS)
4913 return SIM_ARM_FP0_REGNUM + reg;
4914 reg -= NUM_FREGS;
4915
4916 if (reg < NUM_SREGS)
4917 return SIM_ARM_FPS_REGNUM + reg;
4918 reg -= NUM_SREGS;
4919
4920 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4921 }
4922
4923 static const unsigned char op_lit0 = DW_OP_lit0;
4924
4925 static void
4926 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
4927 struct dwarf2_frame_state_reg *reg,
4928 struct frame_info *this_frame)
4929 {
4930 if (is_pacbti_pseudo (gdbarch, regnum))
4931 {
4932 /* Initialize RA_AUTH_CODE to zero. */
4933 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
4934 reg->loc.exp.start = &op_lit0;
4935 reg->loc.exp.len = 1;
4936 return;
4937 }
4938
4939 switch (regnum)
4940 {
4941 case ARM_PC_REGNUM:
4942 case ARM_PS_REGNUM:
4943 reg->how = DWARF2_FRAME_REG_FN;
4944 reg->loc.fn = arm_dwarf2_prev_register;
4945 break;
4946 case ARM_SP_REGNUM:
4947 reg->how = DWARF2_FRAME_REG_CFA;
4948 break;
4949 }
4950 }
4951
4952 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4953 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4954 NULL if an error occurs. BUF is freed. */
4955
4956 static gdb_byte *
4957 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4958 int old_len, int new_len)
4959 {
4960 gdb_byte *new_buf;
4961 int bytes_to_read = new_len - old_len;
4962
4963 new_buf = (gdb_byte *) xmalloc (new_len);
4964 memcpy (new_buf + bytes_to_read, buf, old_len);
4965 xfree (buf);
4966 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4967 {
4968 xfree (new_buf);
4969 return NULL;
4970 }
4971 return new_buf;
4972 }
4973
4974 /* An IT block is at most the 2-byte IT instruction followed by
4975 four 4-byte instructions. The furthest back we must search to
4976 find an IT block that affects the current instruction is thus
4977 2 + 3 * 4 == 14 bytes. */
4978 #define MAX_IT_BLOCK_PREFIX 14
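/* An IT instruction is encoded as 0xbfXY, where X is the first condition and
   Y is a non-zero mask; for example 0xbf08 is a plain "it eq" (illustrative
   encoding). */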
4979
4980 /* Use a quick scan if there are more than this many bytes of
4981 code. */
4982 #define IT_SCAN_THRESHOLD 32
4983
4984 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4985 A breakpoint in an IT block may not be hit, depending on the
4986 condition flags. */
4987 static CORE_ADDR
4988 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4989 {
4990 gdb_byte *buf;
4991 char map_type;
4992 CORE_ADDR boundary, func_start;
4993 int buf_len;
4994 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4995 int i, any, last_it, last_it_count;
4996 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4997
4998 /* If we are using BKPT breakpoints, none of this is necessary. */
4999 if (tdep->thumb2_breakpoint == NULL)
5000 return bpaddr;
5001
5002 /* ARM mode does not have this problem. */
5003 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5004 return bpaddr;
5005
5006 /* We are setting a breakpoint in Thumb code that could potentially
5007 contain an IT block. The first step is to find how much Thumb
5008 code there is; we do not need to read outside of known Thumb
5009 sequences. */
5010 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5011 if (map_type == 0)
5012 /* Thumb-2 code must have mapping symbols to have a chance. */
5013 return bpaddr;
5014
5015 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5016
5017 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5018 && func_start > boundary)
5019 boundary = func_start;
5020
5021 /* Search for a candidate IT instruction. We have to do some fancy
5022 footwork to distinguish a real IT instruction from the second
5023 half of a 32-bit instruction, but there is no need for that if
5024 there's no candidate. */
5025 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5026 if (buf_len == 0)
5027 /* No room for an IT instruction. */
5028 return bpaddr;
5029
5030 buf = (gdb_byte *) xmalloc (buf_len);
5031 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5032 return bpaddr;
5033 any = 0;
5034 for (i = 0; i < buf_len; i += 2)
5035 {
5036 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5037 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5038 {
5039 any = 1;
5040 break;
5041 }
5042 }
5043
5044 if (any == 0)
5045 {
5046 xfree (buf);
5047 return bpaddr;
5048 }
5049
5050 /* OK, the code bytes before this instruction contain at least one
5051 halfword which resembles an IT instruction. We know that it's
5052 Thumb code, but there are still two possibilities. Either the
5053 halfword really is an IT instruction, or it is the second half of
5054 a 32-bit Thumb instruction. The only way we can tell is to
5055 scan forwards from a known instruction boundary. */
5056 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5057 {
5058 int definite;
5059
5060 /* There's a lot of code before this instruction. Start with an
5061 optimistic search; it's easy to recognize halfwords that can
5062 not be the start of a 32-bit instruction, and use that to
5063 lock on to the instruction boundaries. */
5064 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5065 if (buf == NULL)
5066 return bpaddr;
5067 buf_len = IT_SCAN_THRESHOLD;
5068
5069 definite = 0;
5070 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5071 {
5072 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5073 if (thumb_insn_size (inst1) == 2)
5074 {
5075 definite = 1;
5076 break;
5077 }
5078 }
5079
5080 /* At this point, if DEFINITE, BUF[I] is the first place we
5081 are sure that we know the instruction boundaries, and it is far
5082 enough from BPADDR that we could not miss an IT instruction
5083 affecting BPADDR. If ! DEFINITE, give up - start from a
5084 known boundary. */
5085 if (! definite)
5086 {
5087 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5088 bpaddr - boundary);
5089 if (buf == NULL)
5090 return bpaddr;
5091 buf_len = bpaddr - boundary;
5092 i = 0;
5093 }
5094 }
5095 else
5096 {
5097 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5098 if (buf == NULL)
5099 return bpaddr;
5100 buf_len = bpaddr - boundary;
5101 i = 0;
5102 }
5103
5104 /* Scan forwards. Find the last IT instruction before BPADDR. */
5105 last_it = -1;
5106 last_it_count = 0;
5107 while (i < buf_len)
5108 {
5109 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5110 last_it_count--;
5111 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5112 {
5113 last_it = i;
5114 if (inst1 & 0x0001)
5115 last_it_count = 4;
5116 else if (inst1 & 0x0002)
5117 last_it_count = 3;
5118 else if (inst1 & 0x0004)
5119 last_it_count = 2;
5120 else
5121 last_it_count = 1;
5122 }
5123 i += thumb_insn_size (inst1);
5124 }
5125
5126 xfree (buf);
5127
5128 if (last_it == -1)
5129 /* There wasn't really an IT instruction after all. */
5130 return bpaddr;
5131
5132 if (last_it_count < 1)
5133 /* It was too far away. */
5134 return bpaddr;
5135
5136 /* This really is a trouble spot. Move the breakpoint to the IT
5137 instruction. */
5138 return bpaddr - buf_len + last_it;
5139 }
5140
5141 /* ARM displaced stepping support.
5142
5143 Generally ARM displaced stepping works as follows:
5144
5145 1. When an instruction is to be single-stepped, it is first decoded by
5146 arm_process_displaced_insn. Depending on the type of instruction, it is
5147 then copied to a scratch location, possibly in a modified form. The
5148 copy_* set of functions performs such modification, as necessary. A
5149 breakpoint is placed after the modified instruction in the scratch space
5150 to return control to GDB. Note in particular that instructions which
5151 modify the PC will no longer do so after modification.
5152
5153 2. The instruction is single-stepped, by setting the PC to the scratch
5154 location address, and resuming. Control returns to GDB when the
5155 breakpoint is hit.
5156
5157 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5158 function used for the current instruction. This function's job is to
5159 put the CPU/memory state back to what it would have been if the
5160 instruction had been executed unmodified in its original location. */
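/* For instance (see thumb2_copy_preload below), a PC-relative
   "pld [pc, #imm]" is rewritten as "pld [r0, r1]", with r0 preloaded with the
   value the PC would have had at the original location and r1 with the
   offset; the cleanup step then restores r0 and r1. */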
5161
5162 /* NOP instruction (mov r0, r0). */
5163 #define ARM_NOP 0xe1a00000
5164 #define THUMB_NOP 0x4600
5165
5166 /* Helper for register reads for displaced stepping. In particular, this
5167 returns the PC as it would be seen by the instruction at its original
5168 location. */
5169
5170 ULONGEST
5171 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5172 int regno)
5173 {
5174 ULONGEST ret;
5175 CORE_ADDR from = dsc->insn_addr;
5176
5177 if (regno == ARM_PC_REGNUM)
5178 {
5179 /* Compute pipeline offset:
5180 - When executing an ARM instruction, PC reads as the address of the
5181 current instruction plus 8.
5182 - When executing a Thumb instruction, PC reads as the address of the
5183 current instruction plus 4. */
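      /* For example, an ARM instruction originally at 0x8000 reads the PC as
         0x8008 here, no matter where its displaced copy actually executes
         (illustrative address). */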
5184
5185 if (!dsc->is_thumb)
5186 from += 8;
5187 else
5188 from += 4;
5189
5190 displaced_debug_printf ("read pc value %.8lx",
5191 (unsigned long) from);
5192 return (ULONGEST) from;
5193 }
5194 else
5195 {
5196 regcache_cooked_read_unsigned (regs, regno, &ret);
5197
5198 displaced_debug_printf ("read r%d value %.8lx",
5199 regno, (unsigned long) ret);
5200
5201 return ret;
5202 }
5203 }
5204
5205 static int
5206 displaced_in_arm_mode (struct regcache *regs)
5207 {
5208 ULONGEST ps;
5209 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5210
5211 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5212
5213 return (ps & t_bit) == 0;
5214 }
5215
5216 /* Write to the PC as from a branch instruction. */
5217
5218 static void
5219 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5220 ULONGEST val)
5221 {
5222 if (!dsc->is_thumb)
5223 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5224 architecture versions < 6. */
5225 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5226 val & ~(ULONGEST) 0x3);
5227 else
5228 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5229 val & ~(ULONGEST) 0x1);
5230 }
5231
5232 /* Write to the PC as from a branch-exchange instruction. */
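/* For example, a BX to 0x8001 switches to Thumb state and branches to 0x8000,
   while a BX to 0x8000 stays in (or switches to) ARM state (illustrative
   addresses). */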
5233
5234 static void
5235 bx_write_pc (struct regcache *regs, ULONGEST val)
5236 {
5237 ULONGEST ps;
5238 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5239
5240 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5241
5242 if ((val & 1) == 1)
5243 {
5244 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5245 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5246 }
5247 else if ((val & 2) == 0)
5248 {
5249 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5250 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5251 }
5252 else
5253 {
5254 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5255 mode, align dest to 4 bytes). */
5256 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5257 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5258 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5259 }
5260 }
5261
5262 /* Write to the PC as if from a load instruction. */
5263
5264 static void
5265 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5266 ULONGEST val)
5267 {
5268 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5269 bx_write_pc (regs, val);
5270 else
5271 branch_write_pc (regs, dsc, val);
5272 }
5273
5274 /* Write to the PC as if from an ALU instruction. */
5275
5276 static void
5277 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5278 ULONGEST val)
5279 {
5280 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5281 bx_write_pc (regs, val);
5282 else
5283 branch_write_pc (regs, dsc, val);
5284 }
5285
5286 /* Helper for writing to registers for displaced stepping. Writing to the PC
5287 has a varying effects depending on the instruction which does the write:
5288 this is controlled by the WRITE_PC argument. */
5289
5290 void
5291 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5292 int regno, ULONGEST val, enum pc_write_style write_pc)
5293 {
5294 if (regno == ARM_PC_REGNUM)
5295 {
5296 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5297
5298 switch (write_pc)
5299 {
5300 case BRANCH_WRITE_PC:
5301 branch_write_pc (regs, dsc, val);
5302 break;
5303
5304 case BX_WRITE_PC:
5305 bx_write_pc (regs, val);
5306 break;
5307
5308 case LOAD_WRITE_PC:
5309 load_write_pc (regs, dsc, val);
5310 break;
5311
5312 case ALU_WRITE_PC:
5313 alu_write_pc (regs, dsc, val);
5314 break;
5315
5316 case CANNOT_WRITE_PC:
5317 warning (_("Instruction wrote to PC in an unexpected way when "
5318 "single-stepping"));
5319 break;
5320
5321 default:
5322 internal_error (__FILE__, __LINE__,
5323 _("Invalid argument to displaced_write_reg"));
5324 }
5325
5326 dsc->wrote_to_pc = 1;
5327 }
5328 else
5329 {
5330 displaced_debug_printf ("writing r%d value %.8lx",
5331 regno, (unsigned long) val);
5332 regcache_cooked_write_unsigned (regs, regno, val);
5333 }
5334 }
5335
5336 /* This function is used to concisely determine if an instruction INSN
5337 references PC. Register fields of interest in INSN should have the
5338 corresponding fields of BITMASK set to 0b1111. The function
5339 returns 1 if any of these fields in INSN references the PC
5340 (also 0b1111, r15), else it returns 0. */
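/* For example, arm_copy_preload below passes a bitmask of 0x000f0000ul so
   that only the Rn field (bits 16-19) is examined; a PC reference is reported
   only when that field is 0b1111. */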
5341
5342 static int
5343 insn_references_pc (uint32_t insn, uint32_t bitmask)
5344 {
5345 uint32_t lowbit = 1;
5346
5347 while (bitmask != 0)
5348 {
5349 uint32_t mask;
5350
5351 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5352 ;
5353
5354 if (!lowbit)
5355 break;
5356
5357 mask = lowbit * 0xf;
5358
5359 if ((insn & mask) == mask)
5360 return 1;
5361
5362 bitmask &= ~mask;
5363 }
5364
5365 return 0;
5366 }
5367
5368 /* The simplest copy function. Many instructions have the same effect no
5369 matter what address they are executed at: in those cases, use this. */
5370
5371 static int
5372 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5373 arm_displaced_step_copy_insn_closure *dsc)
5374 {
5375 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5376 (unsigned long) insn, iname);
5377
5378 dsc->modinsn[0] = insn;
5379
5380 return 0;
5381 }
5382
5383 static int
5384 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5385 uint16_t insn2, const char *iname,
5386 arm_displaced_step_copy_insn_closure *dsc)
5387 {
5388 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5389 "unmodified", insn1, insn2, iname);
5390
5391 dsc->modinsn[0] = insn1;
5392 dsc->modinsn[1] = insn2;
5393 dsc->numinsns = 2;
5394
5395 return 0;
5396 }
5397
5398 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5399 modification. */
5400 static int
5401 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5402 const char *iname,
5403 arm_displaced_step_copy_insn_closure *dsc)
5404 {
5405 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5406 insn, iname);
5407
5408 dsc->modinsn[0] = insn;
5409
5410 return 0;
5411 }
5412
5413 /* Preload instructions with immediate offset. */
5414
5415 static void
5416 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5417 arm_displaced_step_copy_insn_closure *dsc)
5418 {
5419 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5420 if (!dsc->u.preload.immed)
5421 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5422 }
5423
5424 static void
5425 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5426 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5427 {
5428 ULONGEST rn_val;
5429 /* Preload instructions:
5430
5431 {pli/pld} [rn, #+/-imm]
5432 ->
5433 {pli/pld} [r0, #+/-imm]. */
5434
5435 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5436 rn_val = displaced_read_reg (regs, dsc, rn);
5437 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5438 dsc->u.preload.immed = 1;
5439
5440 dsc->cleanup = &cleanup_preload;
5441 }
5442
5443 static int
5444 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5445 arm_displaced_step_copy_insn_closure *dsc)
5446 {
5447 unsigned int rn = bits (insn, 16, 19);
5448
5449 if (!insn_references_pc (insn, 0x000f0000ul))
5450 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5451
5452 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5453
5454 dsc->modinsn[0] = insn & 0xfff0ffff;
5455
5456 install_preload (gdbarch, regs, dsc, rn);
5457
5458 return 0;
5459 }
5460
5461 static int
5462 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5463 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5464 {
5465 unsigned int rn = bits (insn1, 0, 3);
5466 unsigned int u_bit = bit (insn1, 7);
5467 int imm12 = bits (insn2, 0, 11);
5468 ULONGEST pc_val;
5469
5470 if (rn != ARM_PC_REGNUM)
5471 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5472
5473 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
5474 PLD (literal) Encoding T1. */
5475 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5476 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5477 imm12);
5478
5479 if (!u_bit)
5480 imm12 = -1 * imm12;
5481
5482 /* Rewrite instruction {pli/pld} PC imm12 into:
5483 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5484
5485 {pli/pld} [r0, r1]
5486
5487 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5488
5489 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5490 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5491
5492 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5493
5494 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5495 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5496 dsc->u.preload.immed = 0;
5497
5498 /* {pli/pld} [r0, r1] */
5499 dsc->modinsn[0] = insn1 & 0xfff0;
5500 dsc->modinsn[1] = 0xf001;
5501 dsc->numinsns = 2;
5502
5503 dsc->cleanup = &cleanup_preload;
5504 return 0;
5505 }
5506
5507 /* Preload instructions with register offset. */
5508
5509 static void
5510 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5511 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5512 unsigned int rm)
5513 {
5514 ULONGEST rn_val, rm_val;
5515
5516 /* Preload register-offset instructions:
5517
5518 {pli/pld} [rn, rm {, shift}]
5519 ->
5520 {pli/pld} [r0, r1 {, shift}]. */
5521
5522 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5523 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5524 rn_val = displaced_read_reg (regs, dsc, rn);
5525 rm_val = displaced_read_reg (regs, dsc, rm);
5526 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5527 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5528 dsc->u.preload.immed = 0;
5529
5530 dsc->cleanup = &cleanup_preload;
5531 }
5532
5533 static int
5534 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5535 struct regcache *regs,
5536 arm_displaced_step_copy_insn_closure *dsc)
5537 {
5538 unsigned int rn = bits (insn, 16, 19);
5539 unsigned int rm = bits (insn, 0, 3);
5540
5541
5542 if (!insn_references_pc (insn, 0x000f000ful))
5543 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5544
5545 displaced_debug_printf ("copying preload insn %.8lx",
5546 (unsigned long) insn);
5547
5548 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5549
5550 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5551 return 0;
5552 }
5553
5554 /* Copy/cleanup coprocessor load and store instructions. */
5555
5556 static void
5557 cleanup_copro_load_store (struct gdbarch *gdbarch,
5558 struct regcache *regs,
5559 arm_displaced_step_copy_insn_closure *dsc)
5560 {
5561 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5562
5563 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5564
5565 if (dsc->u.ldst.writeback)
5566 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5567 }
5568
5569 static void
5570 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5571 arm_displaced_step_copy_insn_closure *dsc,
5572 int writeback, unsigned int rn)
5573 {
5574 ULONGEST rn_val;
5575
5576 /* Coprocessor load/store instructions:
5577
5578 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5579 ->
5580 {stc/stc2} [r0, #+/-imm].
5581
5582 ldc/ldc2 are handled identically. */
5583
5584 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5585 rn_val = displaced_read_reg (regs, dsc, rn);
5586 /* PC should be 4-byte aligned. */
5587 rn_val = rn_val & 0xfffffffc;
5588 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5589
5590 dsc->u.ldst.writeback = writeback;
5591 dsc->u.ldst.rn = rn;
5592
5593 dsc->cleanup = &cleanup_copro_load_store;
5594 }
5595
5596 static int
5597 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5598 struct regcache *regs,
5599 arm_displaced_step_copy_insn_closure *dsc)
5600 {
5601 unsigned int rn = bits (insn, 16, 19);
5602
5603 if (!insn_references_pc (insn, 0x000f0000ul))
5604 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5605
5606 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5607 (unsigned long) insn);
5608
5609 dsc->modinsn[0] = insn & 0xfff0ffff;
5610
5611 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5612
5613 return 0;
5614 }
5615
5616 static int
5617 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5618 uint16_t insn2, struct regcache *regs,
5619 arm_displaced_step_copy_insn_closure *dsc)
5620 {
5621 unsigned int rn = bits (insn1, 0, 3);
5622
5623 if (rn != ARM_PC_REGNUM)
5624 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5625 "copro load/store", dsc);
5626
5627 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5628 insn1, insn2);
5629
5630 dsc->modinsn[0] = insn1 & 0xfff0;
5631 dsc->modinsn[1] = insn2;
5632 dsc->numinsns = 2;
5633
5634 /* This function is called for copying the instructions LDC/LDC2/VLDR, which
5635 do not support writeback, so pass 0. */
5636 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5637
5638 return 0;
5639 }
5640
5641 /* Clean up branch instructions (actually perform the branch, by setting
5642 PC). */
5643
5644 static void
5645 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5646 arm_displaced_step_copy_insn_closure *dsc)
5647 {
5648 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5649 int branch_taken = condition_true (dsc->u.branch.cond, status);
5650 enum pc_write_style write_pc = dsc->u.branch.exchange
5651 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5652
5653 if (!branch_taken)
5654 return;
5655
5656 if (dsc->u.branch.link)
5657 {
5658 /* The value of LR should be the address of the next insn after the current
5659 one. In order not to confuse later handling of a `bx lr' insn, if the
5660 current insn is Thumb, bit 0 of the LR value should be set to 1. */
5661 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5662
5663 if (dsc->is_thumb)
5664 next_insn_addr |= 0x1;
5665
5666 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5667 CANNOT_WRITE_PC);
5668 }
5669
5670 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5671 }
5672
5673 /* Copy B/BL/BLX instructions with immediate destinations. */
5674
5675 static void
5676 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5677 arm_displaced_step_copy_insn_closure *dsc,
5678 unsigned int cond, int exchange, int link, long offset)
5679 {
5680 /* Implement "BL<cond> <label>" as:
5681
5682 Preparation: cond <- instruction condition
5683 Insn: mov r0, r0 (nop)
5684 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5685
5686 B<cond> similar, but don't set r14 in cleanup. */
5687
5688 dsc->u.branch.cond = cond;
5689 dsc->u.branch.link = link;
5690 dsc->u.branch.exchange = exchange;
5691
5692 dsc->u.branch.dest = dsc->insn_addr;
5693 if (link && exchange)
5694 /* For BLX, the offset is computed from Align (PC, 4). */
5695 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5696
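/* The offset is relative to the PC value the instruction sees, which is
4 bytes beyond the instruction address in Thumb state and 8 bytes beyond
it in ARM state. */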
5697 if (dsc->is_thumb)
5698 dsc->u.branch.dest += 4 + offset;
5699 else
5700 dsc->u.branch.dest += 8 + offset;
5701
5702 dsc->cleanup = &cleanup_branch;
5703 }
5704 static int
5705 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5706 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5707 {
5708 unsigned int cond = bits (insn, 28, 31);
5709 int exchange = (cond == 0xf);
5710 int link = exchange || bit (insn, 24);
5711 long offset;
5712
5713 displaced_debug_printf ("copying %s immediate insn %.8lx",
5714 (exchange) ? "blx" : (link) ? "bl" : "b",
5715 (unsigned long) insn);
5716 if (exchange)
5717 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5718 then arrange the switch into Thumb mode. */
5719 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5720 else
5721 offset = bits (insn, 0, 23) << 2;
5722
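/* Sign-extend the offset: after the shift above, bit 25 holds the sign
bit of the original 24-bit immediate. */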
5723 if (bit (offset, 25))
5724 offset = offset | ~0x3ffffff;
5725
5726 dsc->modinsn[0] = ARM_NOP;
5727
5728 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5729 return 0;
5730 }
5731
5732 static int
5733 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5734 uint16_t insn2, struct regcache *regs,
5735 arm_displaced_step_copy_insn_closure *dsc)
5736 {
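/* Reconstruct the branch offset from the S, J1, J2 and immediate fields
of the two halfwords; for BL/BLX and B encoding T4, I1 = NOT (J1 XOR S)
and I2 = NOT (J2 XOR S). */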
5737 int link = bit (insn2, 14);
5738 int exchange = link && !bit (insn2, 12);
5739 int cond = INST_AL;
5740 long offset = 0;
5741 int j1 = bit (insn2, 13);
5742 int j2 = bit (insn2, 11);
5743 int s = sbits (insn1, 10, 10);
5744 int i1 = !(j1 ^ bit (insn1, 10));
5745 int i2 = !(j2 ^ bit (insn1, 10));
5746
5747 if (!link && !exchange) /* B */
5748 {
5749 offset = (bits (insn2, 0, 10) << 1);
5750 if (bit (insn2, 12)) /* Encoding T4 */
5751 {
5752 offset |= (bits (insn1, 0, 9) << 12)
5753 | (i2 << 22)
5754 | (i1 << 23)
5755 | (s << 24);
5756 cond = INST_AL;
5757 }
5758 else /* Encoding T3 */
5759 {
5760 offset |= (bits (insn1, 0, 5) << 12)
5761 | (j1 << 18)
5762 | (j2 << 19)
5763 | (s << 20);
5764 cond = bits (insn1, 6, 9);
5765 }
5766 }
5767 else
5768 {
5769 offset = (bits (insn1, 0, 9) << 12);
5770 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5771 offset |= exchange ?
5772 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5773 }
5774
5775 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5776 link ? (exchange) ? "blx" : "bl" : "b",
5777 insn1, insn2, offset);
5778
5779 dsc->modinsn[0] = THUMB_NOP;
5780
5781 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5782 return 0;
5783 }
5784
5785 /* Copy B Thumb instructions. */
5786 static int
5787 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5788 arm_displaced_step_copy_insn_closure *dsc)
5789 {
5790 unsigned int cond = 0;
5791 int offset = 0;
5792 unsigned short bit_12_15 = bits (insn, 12, 15);
5793 CORE_ADDR from = dsc->insn_addr;
5794
5795 if (bit_12_15 == 0xd)
5796 {
5797 /* Encoding T1: offset = SignExtend (imm8:0, 32). */
5798 offset = sbits ((insn << 1), 0, 8);
5799 cond = bits (insn, 8, 11);
5800 }
5801 else if (bit_12_15 == 0xe) /* Encoding T2 */
5802 {
5803 offset = sbits ((insn << 1), 0, 11);
5804 cond = INST_AL;
5805 }
5806
5807 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5808 insn, offset);
5809
5810 dsc->u.branch.cond = cond;
5811 dsc->u.branch.link = 0;
5812 dsc->u.branch.exchange = 0;
5813 dsc->u.branch.dest = from + 4 + offset;
5814
5815 dsc->modinsn[0] = THUMB_NOP;
5816
5817 dsc->cleanup = &cleanup_branch;
5818
5819 return 0;
5820 }
5821
5822 /* Copy BX/BLX with register-specified destinations. */
5823
5824 static void
5825 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5826 arm_displaced_step_copy_insn_closure *dsc, int link,
5827 unsigned int cond, unsigned int rm)
5828 {
5829 /* Implement {BX,BLX}<cond> <reg>" as:
5830
5831 Preparation: cond <- instruction condition
5832 Insn: mov r0, r0 (nop)
5833 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5834
5835 Don't set r14 in cleanup for BX. */
5836
5837 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5838
5839 dsc->u.branch.cond = cond;
5840 dsc->u.branch.link = link;
5841
5842 dsc->u.branch.exchange = 1;
5843
5844 dsc->cleanup = &cleanup_branch;
5845 }
5846
5847 static int
5848 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5849 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5850 {
5851 unsigned int cond = bits (insn, 28, 31);
5852 /* BX: x12xxx1x
5853 BLX: x12xxx3x. */
5854 int link = bit (insn, 5);
5855 unsigned int rm = bits (insn, 0, 3);
5856
5857 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5858
5859 dsc->modinsn[0] = ARM_NOP;
5860
5861 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5862 return 0;
5863 }
5864
5865 static int
5866 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5867 struct regcache *regs,
5868 arm_displaced_step_copy_insn_closure *dsc)
5869 {
5870 int link = bit (insn, 7);
5871 unsigned int rm = bits (insn, 3, 6);
5872
5873 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5874
5875 dsc->modinsn[0] = THUMB_NOP;
5876
5877 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5878
5879 return 0;
5880 }
5881
5882
5883 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5884
5885 static void
5886 cleanup_alu_imm (struct gdbarch *gdbarch,
5887 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5888 {
5889 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5890 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5891 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5892 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5893 }
5894
5895 static int
5896 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5897 arm_displaced_step_copy_insn_closure *dsc)
5898 {
5899 unsigned int rn = bits (insn, 16, 19);
5900 unsigned int rd = bits (insn, 12, 15);
5901 unsigned int op = bits (insn, 21, 24);
5902 int is_mov = (op == 0xd);
5903 ULONGEST rd_val, rn_val;
5904
5905 if (!insn_references_pc (insn, 0x000ff000ul))
5906 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5907
5908 displaced_debug_printf ("copying immediate %s insn %.8lx",
5909 is_mov ? "move" : "ALU",
5910 (unsigned long) insn);
5911
5912 /* Instruction is of form:
5913
5914 <op><cond> rd, [rn,] #imm
5915
5916 Rewrite as:
5917
5918 Preparation: tmp1, tmp2 <- r0, r1;
5919 r0, r1 <- rd, rn
5920 Insn: <op><cond> r0, r1, #imm
5921 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5922 */
5923
5924 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5925 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5926 rn_val = displaced_read_reg (regs, dsc, rn);
5927 rd_val = displaced_read_reg (regs, dsc, rd);
5928 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5929 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5930 dsc->rd = rd;
5931
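/* Rewrite the register fields of the copied insn: Rd becomes r0 and,
except for MOV (which has no Rn operand), Rn becomes r1. */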
5932 if (is_mov)
5933 dsc->modinsn[0] = insn & 0xfff00fff;
5934 else
5935 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5936
5937 dsc->cleanup = &cleanup_alu_imm;
5938
5939 return 0;
5940 }
5941
5942 static int
5943 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5944 uint16_t insn2, struct regcache *regs,
5945 arm_displaced_step_copy_insn_closure *dsc)
5946 {
5947 unsigned int op = bits (insn1, 5, 8);
5948 unsigned int rn, rm, rd;
5949 ULONGEST rd_val, rn_val;
5950
5951 rn = bits (insn1, 0, 3); /* Rn */
5952 rm = bits (insn2, 0, 3); /* Rm */
5953 rd = bits (insn2, 8, 11); /* Rd */
5954
5955 /* This routine is only called for the MOV instruction. */
5956 gdb_assert (op == 0x2 && rn == 0xf);
5957
5958 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5959 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5960
5961 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5962
5963 /* Instruction is of form:
5964
5965 <op><cond> rd, [rn,] #imm
5966
5967 Rewrite as:
5968
5969 Preparation: tmp1, tmp2 <- r0, r1;
5970 r0, r1 <- rd, rn
5971 Insn: <op><cond> r0, r1, #imm
5972 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5973 */
5974
5975 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5976 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5977 rn_val = displaced_read_reg (regs, dsc, rn);
5978 rd_val = displaced_read_reg (regs, dsc, rd);
5979 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5980 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5981 dsc->rd = rd;
5982
5983 dsc->modinsn[0] = insn1;
5984 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5985 dsc->numinsns = 2;
5986
5987 dsc->cleanup = &cleanup_alu_imm;
5988
5989 return 0;
5990 }
5991
5992 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5993
5994 static void
5995 cleanup_alu_reg (struct gdbarch *gdbarch,
5996 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5997 {
5998 ULONGEST rd_val;
5999 int i;
6000
6001 rd_val = displaced_read_reg (regs, dsc, 0);
6002
6003 for (i = 0; i < 3; i++)
6004 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6005
6006 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6007 }
6008
6009 static void
6010 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6011 arm_displaced_step_copy_insn_closure *dsc,
6012 unsigned int rd, unsigned int rn, unsigned int rm)
6013 {
6014 ULONGEST rd_val, rn_val, rm_val;
6015
6016 /* Instruction is of form:
6017
6018 <op><cond> rd, [rn,] rm [, <shift>]
6019
6020 Rewrite as:
6021
6022 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6023 r0, r1, r2 <- rd, rn, rm
6024 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6025 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6026 */
6027
6028 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6029 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6030 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6031 rd_val = displaced_read_reg (regs, dsc, rd);
6032 rn_val = displaced_read_reg (regs, dsc, rn);
6033 rm_val = displaced_read_reg (regs, dsc, rm);
6034 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6035 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6036 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6037 dsc->rd = rd;
6038
6039 dsc->cleanup = &cleanup_alu_reg;
6040 }
6041
6042 static int
6043 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6044 arm_displaced_step_copy_insn_closure *dsc)
6045 {
6046 unsigned int op = bits (insn, 21, 24);
6047 int is_mov = (op == 0xd);
6048
6049 if (!insn_references_pc (insn, 0x000ff00ful))
6050 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6051
6052 displaced_debug_printf ("copying reg %s insn %.8lx",
6053 is_mov ? "move" : "ALU", (unsigned long) insn);
6054
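/* Rewrite the register fields of the copied insn: Rd becomes r0, Rm
becomes r2 and, except for MOV, Rn becomes r1. */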
6055 if (is_mov)
6056 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6057 else
6058 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6059
6060 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6061 bits (insn, 0, 3));
6062 return 0;
6063 }
6064
6065 static int
6066 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6067 struct regcache *regs,
6068 arm_displaced_step_copy_insn_closure *dsc)
6069 {
6070 unsigned rm, rd;
6071
6072 rm = bits (insn, 3, 6);
6073 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6074
6075 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6076 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6077
6078 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6079
6080 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6081
6082 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6083
6084 return 0;
6085 }
6086
6087 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6088
6089 static void
6090 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6091 struct regcache *regs,
6092 arm_displaced_step_copy_insn_closure *dsc)
6093 {
6094 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6095 int i;
6096
6097 for (i = 0; i < 4; i++)
6098 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6099
6100 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6101 }
6102
6103 static void
6104 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6105 arm_displaced_step_copy_insn_closure *dsc,
6106 unsigned int rd, unsigned int rn, unsigned int rm,
6107 unsigned rs)
6108 {
6109 int i;
6110 ULONGEST rd_val, rn_val, rm_val, rs_val;
6111
6112 /* Instruction is of form:
6113
6114 <op><cond> rd, [rn,] rm, <shift> rs
6115
6116 Rewrite as:
6117
6118 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6119 r0, r1, r2, r3 <- rd, rn, rm, rs
6120 Insn: <op><cond> r0, r1, r2, <shift> r3
6121 Cleanup: tmp5 <- r0
6122 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6123 rd <- tmp5
6124 */
6125
6126 for (i = 0; i < 4; i++)
6127 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6128
6129 rd_val = displaced_read_reg (regs, dsc, rd);
6130 rn_val = displaced_read_reg (regs, dsc, rn);
6131 rm_val = displaced_read_reg (regs, dsc, rm);
6132 rs_val = displaced_read_reg (regs, dsc, rs);
6133 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6134 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6135 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6136 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6137 dsc->rd = rd;
6138 dsc->cleanup = &cleanup_alu_shifted_reg;
6139 }
6140
6141 static int
6142 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6143 struct regcache *regs,
6144 arm_displaced_step_copy_insn_closure *dsc)
6145 {
6146 unsigned int op = bits (insn, 21, 24);
6147 int is_mov = (op == 0xd);
6148 unsigned int rd, rn, rm, rs;
6149
6150 if (!insn_references_pc (insn, 0x000fff0ful))
6151 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6152
6153 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6154 is_mov ? "move" : "ALU",
6155 (unsigned long) insn);
6156
6157 rn = bits (insn, 16, 19);
6158 rm = bits (insn, 0, 3);
6159 rs = bits (insn, 8, 11);
6160 rd = bits (insn, 12, 15);
6161
6162 if (is_mov)
6163 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6164 else
6165 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6166
6167 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6168
6169 return 0;
6170 }
6171
6172 /* Clean up load instructions. */
6173
6174 static void
6175 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6176 arm_displaced_step_copy_insn_closure *dsc)
6177 {
6178 ULONGEST rt_val, rt_val2 = 0, rn_val;
6179
6180 rt_val = displaced_read_reg (regs, dsc, 0);
6181 if (dsc->u.ldst.xfersize == 8)
6182 rt_val2 = displaced_read_reg (regs, dsc, 1);
6183 rn_val = displaced_read_reg (regs, dsc, 2);
6184
6185 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6186 if (dsc->u.ldst.xfersize > 4)
6187 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6188 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6189 if (!dsc->u.ldst.immed)
6190 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6191
6192 /* Handle register writeback. */
6193 if (dsc->u.ldst.writeback)
6194 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6195 /* Put result in right place. */
6196 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6197 if (dsc->u.ldst.xfersize == 8)
6198 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6199 }
6200
6201 /* Clean up store instructions. */
6202
6203 static void
6204 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6205 arm_displaced_step_copy_insn_closure *dsc)
6206 {
6207 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6208
6209 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6210 if (dsc->u.ldst.xfersize > 4)
6211 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6212 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6213 if (!dsc->u.ldst.immed)
6214 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6215 if (!dsc->u.ldst.restore_r4)
6216 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6217
6218 /* Writeback. */
6219 if (dsc->u.ldst.writeback)
6220 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6221 }
6222
6223 /* Copy "extra" load/store instructions. These are halfword/doubleword
6224 transfers, which have a different encoding to byte/word transfers. */
6225
6226 static int
6227 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6228 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6229 {
6230 unsigned int op1 = bits (insn, 20, 24);
6231 unsigned int op2 = bits (insn, 5, 6);
6232 unsigned int rt = bits (insn, 12, 15);
6233 unsigned int rn = bits (insn, 16, 19);
6234 unsigned int rm = bits (insn, 0, 3);
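/* These tables are indexed by the opcode computed below from op2 and op1:
whether the operation is a load, and its transfer size in bytes. */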
6235 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6236 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6237 int immed = (op1 & 0x4) != 0;
6238 int opcode;
6239 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6240
6241 if (!insn_references_pc (insn, 0x000ff00ful))
6242 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6243
6244 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6245 unprivileged ? "unprivileged " : "",
6246 (unsigned long) insn);
6247
6248 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6249
6250 if (opcode < 0)
6251 internal_error (__FILE__, __LINE__,
6252 _("copy_extra_ld_st: instruction decode error"));
6253
6254 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6255 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6256 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6257 if (!immed)
6258 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6259
6260 rt_val = displaced_read_reg (regs, dsc, rt);
6261 if (bytesize[opcode] == 8)
6262 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6263 rn_val = displaced_read_reg (regs, dsc, rn);
6264 if (!immed)
6265 rm_val = displaced_read_reg (regs, dsc, rm);
6266
6267 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6268 if (bytesize[opcode] == 8)
6269 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6270 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6271 if (!immed)
6272 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6273
6274 dsc->rd = rt;
6275 dsc->u.ldst.xfersize = bytesize[opcode];
6276 dsc->u.ldst.rn = rn;
6277 dsc->u.ldst.immed = immed;
6278 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6279 dsc->u.ldst.restore_r4 = 0;
6280
6281 if (immed)
6282 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6283 ->
6284 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6285 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6286 else
6287 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6288 ->
6289 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6290 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6291
6292 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6293
6294 return 0;
6295 }
6296
6297 /* Copy byte/half word/word loads and stores. */
6298
6299 static void
6300 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6301 arm_displaced_step_copy_insn_closure *dsc, int load,
6302 int immed, int writeback, int size, int usermode,
6303 int rt, int rm, int rn)
6304 {
6305 ULONGEST rt_val, rn_val, rm_val = 0;
6306
6307 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6308 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6309 if (!immed)
6310 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6311 if (!load)
6312 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6313
6314 rt_val = displaced_read_reg (regs, dsc, rt);
6315 rn_val = displaced_read_reg (regs, dsc, rn);
6316 if (!immed)
6317 rm_val = displaced_read_reg (regs, dsc, rm);
6318
6319 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6320 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6321 if (!immed)
6322 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6323 dsc->rd = rt;
6324 dsc->u.ldst.xfersize = size;
6325 dsc->u.ldst.rn = rn;
6326 dsc->u.ldst.immed = immed;
6327 dsc->u.ldst.writeback = writeback;
6328
6329 /* To write PC we can do:
6330
6331 Before this sequence of instructions:
6332 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6333 r2 is the Rn value got from displaced_read_reg.
6334
6335 Insn1: push {pc} Write address of STR instruction + offset on stack
6336 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6337 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6338 = addr(Insn1) + offset - addr(Insn3) - 8
6339 = offset - 16
6340 Insn4: add r4, r4, #8 r4 = offset - 8
6341 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6342 = from + offset
6343 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6344
6345 Otherwise we don't know what value to write for PC, since the offset is
6346 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6347 of this can be found in Section "Saving from r15" in
6348 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6349
6350 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6351 }
6352
6353
6354 static int
6355 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6356 uint16_t insn2, struct regcache *regs,
6357 arm_displaced_step_copy_insn_closure *dsc, int size)
6358 {
6359 unsigned int u_bit = bit (insn1, 7);
6360 unsigned int rt = bits (insn2, 12, 15);
6361 int imm12 = bits (insn2, 0, 11);
6362 ULONGEST pc_val;
6363
6364 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6365 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6366 imm12);
6367
6368 if (!u_bit)
6369 imm12 = -1 * imm12;
6370
6371 /* Rewrite instruction LDR Rt imm12 into:
6372
6373 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
6374
6375 LDR R0, R2, R3,
6376
6377 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
6378
6379
6380 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6381 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6382 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6383
6384 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6385
6386 pc_val = pc_val & 0xfffffffc;
6387
6388 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6389 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6390
6391 dsc->rd = rt;
6392
6393 dsc->u.ldst.xfersize = size;
6394 dsc->u.ldst.immed = 0;
6395 dsc->u.ldst.writeback = 0;
6396 dsc->u.ldst.restore_r4 = 0;
6397
6398 /* LDR R0, R2, R3 */
6399 dsc->modinsn[0] = 0xf852;
6400 dsc->modinsn[1] = 0x3;
6401 dsc->numinsns = 2;
6402
6403 dsc->cleanup = &cleanup_load;
6404
6405 return 0;
6406 }
6407
6408 static int
6409 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6410 uint16_t insn2, struct regcache *regs,
6411 arm_displaced_step_copy_insn_closure *dsc,
6412 int writeback, int immed)
6413 {
6414 unsigned int rt = bits (insn2, 12, 15);
6415 unsigned int rn = bits (insn1, 0, 3);
6416 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6417 /* In LDR (register), there is also a register Rm, which is not allowed to
6418 be PC, so we don't have to check it. */
6419
6420 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6421 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6422 dsc);
6423
6424 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6425 rt, rn, insn1, insn2);
6426
6427 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6428 0, rt, rm, rn);
6429
6430 dsc->u.ldst.restore_r4 = 0;
6431
6432 if (immed)
6433 /* ldr[b]<cond> rt, [rn, #imm], etc.
6434 ->
6435 ldr[b]<cond> r0, [r2, #imm]. */
6436 {
6437 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6438 dsc->modinsn[1] = insn2 & 0x0fff;
6439 }
6440 else
6441 /* ldr[b]<cond> rt, [rn, rm], etc.
6442 ->
6443 ldr[b]<cond> r0, [r2, r3]. */
6444 {
6445 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6446 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6447 }
6448
6449 dsc->numinsns = 2;
6450
6451 return 0;
6452 }
6453
6454
6455 static int
6456 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6457 struct regcache *regs,
6458 arm_displaced_step_copy_insn_closure *dsc,
6459 int load, int size, int usermode)
6460 {
6461 int immed = !bit (insn, 25);
6462 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6463 unsigned int rt = bits (insn, 12, 15);
6464 unsigned int rn = bits (insn, 16, 19);
6465 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6466
6467 if (!insn_references_pc (insn, 0x000ff00ful))
6468 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6469
6470 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6471 load ? (size == 1 ? "ldrb" : "ldr")
6472 : (size == 1 ? "strb" : "str"),
6473 usermode ? "t" : "",
6474 rt, rn,
6475 (unsigned long) insn);
6476
6477 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6478 usermode, rt, rm, rn);
6479
6480 if (load || rt != ARM_PC_REGNUM)
6481 {
6482 dsc->u.ldst.restore_r4 = 0;
6483
6484 if (immed)
6485 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6486 ->
6487 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6488 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6489 else
6490 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6491 ->
6492 {ldr,str}[b]<cond> r0, [r2, r3]. */
6493 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6494 }
6495 else
6496 {
6497 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6498 dsc->u.ldst.restore_r4 = 1;
6499 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6500 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6501 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6502 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6503 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6504
6505 /* As above. */
6506 if (immed)
6507 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6508 else
6509 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6510
6511 dsc->numinsns = 6;
6512 }
6513
6514 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6515
6516 return 0;
6517 }
6518
6519 /* Cleanup LDM instructions with fully-populated register list. This is an
6520 unfortunate corner case: it's impossible to implement correctly by modifying
6521 the instruction. The issue is as follows: we have an instruction,
6522
6523 ldm rN, {r0-r15}
6524
6525 which we must rewrite to avoid loading PC. A possible solution would be to
6526 do the load in two halves, something like (with suitable cleanup
6527 afterwards):
6528
6529 mov r8, rN
6530 ldm[id][ab] r8!, {r0-r7}
6531 str r7, <temp>
6532 ldm[id][ab] r8, {r7-r14}
6533 <bkpt>
6534
6535 but at present there's no suitable place for <temp>, since the scratch space
6536 is overwritten before the cleanup routine is called. For now, we simply
6537 emulate the instruction. */
6538
6539 static void
6540 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6541 arm_displaced_step_copy_insn_closure *dsc)
6542 {
6543 int inc = dsc->u.block.increment;
6544 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6545 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6546 uint32_t regmask = dsc->u.block.regmask;
6547 int regno = inc ? 0 : 15;
6548 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6549 int exception_return = dsc->u.block.load && dsc->u.block.user
6550 && (regmask & 0x8000) != 0;
6551 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6552 int do_transfer = condition_true (dsc->u.block.cond, status);
6553 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6554
6555 if (!do_transfer)
6556 return;
6557
6558 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6559 sensible we can do here. Complain loudly. */
6560 if (exception_return)
6561 error (_("Cannot single-step exception return"));
6562
6563 /* We don't handle any stores here for now. */
6564 gdb_assert (dsc->u.block.load != 0);
6565
6566 displaced_debug_printf ("emulating block transfer: %s %s %s",
6567 dsc->u.block.load ? "ldm" : "stm",
6568 dsc->u.block.increment ? "inc" : "dec",
6569 dsc->u.block.before ? "before" : "after");
6570
6571 while (regmask)
6572 {
6573 uint32_t memword;
6574
6575 if (inc)
6576 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6577 regno++;
6578 else
6579 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6580 regno--;
6581
6582 xfer_addr += bump_before;
6583
6584 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6585 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6586
6587 xfer_addr += bump_after;
6588
6589 regmask &= ~(1 << regno);
6590 }
6591
6592 if (dsc->u.block.writeback)
6593 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6594 CANNOT_WRITE_PC);
6595 }
6596
6597 /* Clean up an STM which included the PC in the register list. */
6598
6599 static void
6600 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6601 arm_displaced_step_copy_insn_closure *dsc)
6602 {
6603 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6604 int store_executed = condition_true (dsc->u.block.cond, status);
6605 CORE_ADDR pc_stored_at, transferred_regs
6606 = count_one_bits (dsc->u.block.regmask);
6607 CORE_ADDR stm_insn_addr;
6608 uint32_t pc_val;
6609 long offset;
6610 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6611
6612 /* If condition code fails, there's nothing else to do. */
6613 if (!store_executed)
6614 return;
6615
6616 if (dsc->u.block.increment)
6617 {
6618 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6619
6620 if (dsc->u.block.before)
6621 pc_stored_at += 4;
6622 }
6623 else
6624 {
6625 pc_stored_at = dsc->u.block.xfer_addr;
6626
6627 if (dsc->u.block.before)
6628 pc_stored_at -= 4;
6629 }
6630
6631 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6632 stm_insn_addr = dsc->scratch_base;
6633 offset = pc_val - stm_insn_addr;
6634
6635 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6636 offset);
6637
6638 /* Rewrite the stored PC to the proper value for the non-displaced original
6639 instruction. */
6640 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6641 dsc->insn_addr + offset);
6642 }
6643
6644 /* Clean up an LDM which includes the PC in the register list. We clumped all
6645 the registers in the transferred list into a contiguous range r0...rX (to
6646 avoid loading PC directly and losing control of the debugged program), so we
6647 must undo that here. */
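/* For example, "ldm r6, {r4, r7, pc}" is executed out of line as
"ldm r6, {r0, r1, r2}"; this cleanup then moves r2 into the PC, r1 into
r7 and r0 into r4, and finally restores the original values of the
clobbered low registers from dsc->tmp[]. */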
6648
6649 static void
6650 cleanup_block_load_pc (struct gdbarch *gdbarch,
6651 struct regcache *regs,
6652 arm_displaced_step_copy_insn_closure *dsc)
6653 {
6654 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6655 int load_executed = condition_true (dsc->u.block.cond, status);
6656 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6657 unsigned int regs_loaded = count_one_bits (mask);
6658 unsigned int num_to_shuffle = regs_loaded, clobbered;
6659
6660 /* The method employed here will fail if the register list is fully populated
6661 (we need to avoid loading PC directly). */
6662 gdb_assert (num_to_shuffle < 16);
6663
6664 if (!load_executed)
6665 return;
6666
6667 clobbered = (1 << num_to_shuffle) - 1;
6668
6669 while (num_to_shuffle > 0)
6670 {
6671 if ((mask & (1 << write_reg)) != 0)
6672 {
6673 unsigned int read_reg = num_to_shuffle - 1;
6674
6675 if (read_reg != write_reg)
6676 {
6677 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6678 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6679 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6680 read_reg, write_reg);
6681 }
6682 else
6683 displaced_debug_printf ("LDM: register r%d already in the right "
6684 "place", write_reg);
6685
6686 clobbered &= ~(1 << write_reg);
6687
6688 num_to_shuffle--;
6689 }
6690
6691 write_reg--;
6692 }
6693
6694 /* Restore any registers we scribbled over. */
6695 for (write_reg = 0; clobbered != 0; write_reg++)
6696 {
6697 if ((clobbered & (1 << write_reg)) != 0)
6698 {
6699 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6700 CANNOT_WRITE_PC);
6701 displaced_debug_printf ("LDM: restored clobbered register r%d",
6702 write_reg);
6703 clobbered &= ~(1 << write_reg);
6704 }
6705 }
6706
6707 /* Perform register writeback manually. */
6708 if (dsc->u.block.writeback)
6709 {
6710 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6711
6712 if (dsc->u.block.increment)
6713 new_rn_val += regs_loaded * 4;
6714 else
6715 new_rn_val -= regs_loaded * 4;
6716
6717 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6718 CANNOT_WRITE_PC);
6719 }
6720 }
6721
6722 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6723 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6724
6725 static int
6726 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6727 struct regcache *regs,
6728 arm_displaced_step_copy_insn_closure *dsc)
6729 {
6730 int load = bit (insn, 20);
6731 int user = bit (insn, 22);
6732 int increment = bit (insn, 23);
6733 int before = bit (insn, 24);
6734 int writeback = bit (insn, 21);
6735 int rn = bits (insn, 16, 19);
6736
6737 /* Block transfers which don't mention PC can be run directly
6738 out-of-line. */
6739 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6740 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6741
6742 if (rn == ARM_PC_REGNUM)
6743 {
6744 warning (_("displaced: Unpredictable LDM or STM with "
6745 "base register r15"));
6746 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6747 }
6748
6749 displaced_debug_printf ("copying block transfer insn %.8lx",
6750 (unsigned long) insn);
6751
6752 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6753 dsc->u.block.rn = rn;
6754
6755 dsc->u.block.load = load;
6756 dsc->u.block.user = user;
6757 dsc->u.block.increment = increment;
6758 dsc->u.block.before = before;
6759 dsc->u.block.writeback = writeback;
6760 dsc->u.block.cond = bits (insn, 28, 31);
6761
6762 dsc->u.block.regmask = insn & 0xffff;
6763
6764 if (load)
6765 {
6766 if ((insn & 0xffff) == 0xffff)
6767 {
6768 /* LDM with a fully-populated register list. This case is
6769 particularly tricky. Implement for now by fully emulating the
6770 instruction (which might not behave perfectly in all cases, but
6771 these instructions should be rare enough for that not to matter
6772 too much). */
6773 dsc->modinsn[0] = ARM_NOP;
6774
6775 dsc->cleanup = &cleanup_block_load_all;
6776 }
6777 else
6778 {
6779 /* LDM of a list of registers which includes PC. Implement by
6780 rewriting the list of registers to be transferred into a
6781 contiguous chunk r0...rX before doing the transfer, then shuffling
6782 registers into the correct places in the cleanup routine. */
6783 unsigned int regmask = insn & 0xffff;
6784 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6785 unsigned int i;
6786
6787 for (i = 0; i < num_in_list; i++)
6788 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6789
6790 /* Writeback makes things complicated. We need to avoid clobbering
6791 the base register with one of the registers in our modified
6792 register list, but just using a different register can't work in
6793 all cases, e.g.:
6794
6795 ldm r14!, {r0-r13,pc}
6796
6797 which would need to be rewritten as:
6798
6799 ldm rN!, {r0-r14}
6800
6801 but that can't work, because there's no free register for N.
6802
6803 Solve this by turning off the writeback bit, and emulating
6804 writeback manually in the cleanup routine. */
6805
6806 if (writeback)
6807 insn &= ~(1 << 21);
6808
6809 new_regmask = (1 << num_in_list) - 1;
6810
6811 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6812 "%.4x, modified list %.4x",
6813 rn, writeback ? "!" : "",
6814 (int) insn & 0xffff, new_regmask);
6815
6816 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6817
6818 dsc->cleanup = &cleanup_block_load_pc;
6819 }
6820 }
6821 else
6822 {
6823 /* STM of a list of registers which includes PC. Run the instruction
6824 as-is, but out of line: this will store the wrong value for the PC,
6825 so we must manually fix up the memory in the cleanup routine.
6826 Doing things this way has the advantage that we can auto-detect
6827 the offset of the PC write (which is architecture-dependent) in
6828 the cleanup routine. */
6829 dsc->modinsn[0] = insn;
6830
6831 dsc->cleanup = &cleanup_block_store_pc;
6832 }
6833
6834 return 0;
6835 }
6836
6837 static int
6838 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6839 struct regcache *regs,
6840 arm_displaced_step_copy_insn_closure *dsc)
6841 {
6842 int rn = bits (insn1, 0, 3);
6843 int load = bit (insn1, 4);
6844 int writeback = bit (insn1, 5);
6845
6846 /* Block transfers which don't mention PC can be run directly
6847 out-of-line. */
6848 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6849 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6850
6851 if (rn == ARM_PC_REGNUM)
6852 {
6853 warning (_("displaced: Unpredictable LDM or STM with "
6854 "base register r15"));
6855 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6856 "unpredictable ldm/stm", dsc);
6857 }
6858
6859 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6860 insn1, insn2);
6861
6862 /* Clear bit 13, since it should always be zero. */
6863 dsc->u.block.regmask = (insn2 & 0xdfff);
6864 dsc->u.block.rn = rn;
6865
6866 dsc->u.block.load = load;
6867 dsc->u.block.user = 0;
6868 dsc->u.block.increment = bit (insn1, 7);
6869 dsc->u.block.before = bit (insn1, 8);
6870 dsc->u.block.writeback = writeback;
6871 dsc->u.block.cond = INST_AL;
6872 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6873
6874 if (load)
6875 {
6876 if (dsc->u.block.regmask == 0xffff)
6877 {
6878 /* This case is impossible: bit 13 was cleared from the register mask above. */
6879 gdb_assert (0);
6880 }
6881 else
6882 {
6883 unsigned int regmask = dsc->u.block.regmask;
6884 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6885 unsigned int i;
6886
6887 for (i = 0; i < num_in_list; i++)
6888 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6889
6890 if (writeback)
6891 insn1 &= ~(1 << 5);
6892
6893 new_regmask = (1 << num_in_list) - 1;
6894
6895 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6896 "%.4x, modified list %.4x",
6897 rn, writeback ? "!" : "",
6898 (int) dsc->u.block.regmask, new_regmask);
6899
6900 dsc->modinsn[0] = insn1;
6901 dsc->modinsn[1] = (new_regmask & 0xffff);
6902 dsc->numinsns = 2;
6903
6904 dsc->cleanup = &cleanup_block_load_pc;
6905 }
6906 }
6907 else
6908 {
6909 dsc->modinsn[0] = insn1;
6910 dsc->modinsn[1] = insn2;
6911 dsc->numinsns = 2;
6912 dsc->cleanup = &cleanup_block_store_pc;
6913 }
6914 return 0;
6915 }
6916
6917 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6918 This is used to avoid a dependency on BFD's bfd_endian enum. */
6919
6920 ULONGEST
6921 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6922 int byte_order)
6923 {
6924 return read_memory_unsigned_integer (memaddr, len,
6925 (enum bfd_endian) byte_order);
6926 }
6927
6928 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6929
6930 CORE_ADDR
6931 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6932 CORE_ADDR val)
6933 {
6934 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6935 }
6936
6937 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6938
6939 static CORE_ADDR
6940 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6941 {
6942 return 0;
6943 }
6944
6945 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6946
6947 int
6948 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6949 {
6950 return arm_is_thumb (self->regcache);
6951 }
6952
6953 /* arm_software_single_step() is called just before we want to resume the
6954 inferior, if we want to single-step it but there is no hardware or kernel
6955 single-step support. We find the possible targets of the coming
6956 instruction and breakpoint them. */
6957
6958 std::vector<CORE_ADDR>
6959 arm_software_single_step (struct regcache *regcache)
6960 {
6961 struct gdbarch *gdbarch = regcache->arch ();
6962 struct arm_get_next_pcs next_pcs_ctx;
6963
6964 arm_get_next_pcs_ctor (&next_pcs_ctx,
6965 &arm_get_next_pcs_ops,
6966 gdbarch_byte_order (gdbarch),
6967 gdbarch_byte_order_for_code (gdbarch),
6968 0,
6969 regcache);
6970
6971 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6972
6973 for (CORE_ADDR &pc_ref : next_pcs)
6974 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6975
6976 return next_pcs;
6977 }
6978
6979 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6980 for Linux, where some SVC instructions must be treated specially. */
6981
6982 static void
6983 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6984 arm_displaced_step_copy_insn_closure *dsc)
6985 {
6986 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6987
6988 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6989 (unsigned long) resume_addr);
6990
6991 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6992 }
6993
6994
6995 /* Common copy routine for svc instruction. */
6996
6997 static int
6998 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6999 arm_displaced_step_copy_insn_closure *dsc)
7000 {
7001 /* Preparation: none.
7002 Insn: unmodified svc.
7003 Cleanup: pc <- insn_addr + insn_size. */
7004
7005 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7006 instruction. */
7007 dsc->wrote_to_pc = 1;
7008
7009 /* Allow OS-specific code to override SVC handling. */
7010 if (dsc->u.svc.copy_svc_os)
7011 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7012 else
7013 {
7014 dsc->cleanup = &cleanup_svc;
7015 return 0;
7016 }
7017 }
7018
7019 static int
7020 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7021 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7022 {
7023
7024 displaced_debug_printf ("copying svc insn %.8lx",
7025 (unsigned long) insn);
7026
7027 dsc->modinsn[0] = insn;
7028
7029 return install_svc (gdbarch, regs, dsc);
7030 }
7031
7032 static int
7033 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7034 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7035 {
7036
7037 displaced_debug_printf ("copying svc insn %.4x", insn);
7038
7039 dsc->modinsn[0] = insn;
7040
7041 return install_svc (gdbarch, regs, dsc);
7042 }
7043
7044 /* Copy undefined instructions. */
7045
7046 static int
7047 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7048 arm_displaced_step_copy_insn_closure *dsc)
7049 {
7050 displaced_debug_printf ("copying undefined insn %.8lx",
7051 (unsigned long) insn);
7052
7053 dsc->modinsn[0] = insn;
7054
7055 return 0;
7056 }
7057
7058 static int
7059 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7060 arm_displaced_step_copy_insn_closure *dsc)
7061 {
7062
7063 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7064 (unsigned short) insn1, (unsigned short) insn2);
7065
7066 dsc->modinsn[0] = insn1;
7067 dsc->modinsn[1] = insn2;
7068 dsc->numinsns = 2;
7069
7070 return 0;
7071 }
7072
7073 /* Copy unpredictable instructions. */
7074
7075 static int
7076 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7077 arm_displaced_step_copy_insn_closure *dsc)
7078 {
7079 displaced_debug_printf ("copying unpredictable insn %.8lx",
7080 (unsigned long) insn);
7081
7082 dsc->modinsn[0] = insn;
7083
7084 return 0;
7085 }
7086
7087 /* The decode_* functions are instruction decoding helpers. They mostly follow
7088 the presentation in the ARM ARM. */
7089
7090 static int
7091 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7092 struct regcache *regs,
7093 arm_displaced_step_copy_insn_closure *dsc)
7094 {
7095 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7096 unsigned int rn = bits (insn, 16, 19);
7097
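/* Dispatch on op1 (bits 26-20), op2 (bits 7-4) and Rn, following the
"Memory hints, Advanced SIMD instructions, and miscellaneous instructions"
decode table in the ARM ARM. */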
7098 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7099 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7100 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7101 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7102 else if ((op1 & 0x60) == 0x20)
7103 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7104 else if ((op1 & 0x71) == 0x40)
7105 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7106 dsc);
7107 else if ((op1 & 0x77) == 0x41)
7108 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7109 else if ((op1 & 0x77) == 0x45)
7110 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7111 else if ((op1 & 0x77) == 0x51)
7112 {
7113 if (rn != 0xf)
7114 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7115 else
7116 return arm_copy_unpred (gdbarch, insn, dsc);
7117 }
7118 else if ((op1 & 0x77) == 0x55)
7119 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7120 else if (op1 == 0x57)
7121 switch (op2)
7122 {
7123 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7124 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7125 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7126 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7127 default: return arm_copy_unpred (gdbarch, insn, dsc);
7128 }
7129 else if ((op1 & 0x63) == 0x43)
7130 return arm_copy_unpred (gdbarch, insn, dsc);
7131 else if ((op2 & 0x1) == 0x0)
7132 switch (op1 & ~0x80)
7133 {
7134 case 0x61:
7135 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7136 case 0x65:
7137 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7138 case 0x71: case 0x75:
7139 /* pld/pldw reg. */
7140 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7141 case 0x63: case 0x67: case 0x73: case 0x77:
7142 return arm_copy_unpred (gdbarch, insn, dsc);
7143 default:
7144 return arm_copy_undef (gdbarch, insn, dsc);
7145 }
7146 else
7147 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7148 }
7149
7150 static int
7151 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7152 struct regcache *regs,
7153 arm_displaced_step_copy_insn_closure *dsc)
7154 {
7155 if (bit (insn, 27) == 0)
7156 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7157 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7158 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7159 {
7160 case 0x0: case 0x2:
7161 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7162
7163 case 0x1: case 0x3:
7164 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7165
7166 case 0x4: case 0x5: case 0x6: case 0x7:
7167 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7168
7169 case 0x8:
7170 switch ((insn & 0xe00000) >> 21)
7171 {
7172 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7173 /* stc/stc2. */
7174 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7175
7176 case 0x2:
7177 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7178
7179 default:
7180 return arm_copy_undef (gdbarch, insn, dsc);
7181 }
7182
7183 case 0x9:
7184 {
7185 int rn_f = (bits (insn, 16, 19) == 0xf);
7186 switch ((insn & 0xe00000) >> 21)
7187 {
7188 case 0x1: case 0x3:
7189 /* ldc/ldc2 imm (undefined for rn == pc). */
7190 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7191 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7192
7193 case 0x2:
7194 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7195
7196 case 0x4: case 0x5: case 0x6: case 0x7:
7197 /* ldc/ldc2 lit (undefined for rn != pc). */
7198 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7199 : arm_copy_undef (gdbarch, insn, dsc);
7200
7201 default:
7202 return arm_copy_undef (gdbarch, insn, dsc);
7203 }
7204 }
7205
7206 case 0xa:
7207 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7208
7209 case 0xb:
7210 if (bits (insn, 16, 19) == 0xf)
7211 /* ldc/ldc2 lit. */
7212 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7213 else
7214 return arm_copy_undef (gdbarch, insn, dsc);
7215
7216 case 0xc:
7217 if (bit (insn, 4))
7218 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7219 else
7220 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7221
7222 case 0xd:
7223 if (bit (insn, 4))
7224 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7225 else
7226 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7227
7228 default:
7229 return arm_copy_undef (gdbarch, insn, dsc);
7230 }
7231 }
7232
7233 /* Decode miscellaneous instructions in dp/misc encoding space. */
7234
7235 static int
7236 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7237 struct regcache *regs,
7238 arm_displaced_step_copy_insn_closure *dsc)
7239 {
7240 unsigned int op2 = bits (insn, 4, 6);
7241 unsigned int op = bits (insn, 21, 22);
7242
7243 switch (op2)
7244 {
7245 case 0x0:
7246 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7247
7248 case 0x1:
7249 if (op == 0x1) /* bx. */
7250 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7251 else if (op == 0x3)
7252 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7253 else
7254 return arm_copy_undef (gdbarch, insn, dsc);
7255
7256 case 0x2:
7257 if (op == 0x1)
7258 /* Not really supported. */
7259 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7260 else
7261 return arm_copy_undef (gdbarch, insn, dsc);
7262
7263 case 0x3:
7264 if (op == 0x1)
7265 return arm_copy_bx_blx_reg (gdbarch, insn,
7266 regs, dsc); /* blx register. */
7267 else
7268 return arm_copy_undef (gdbarch, insn, dsc);
7269
7270 case 0x5:
7271 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7272
7273 case 0x7:
7274 if (op == 0x1)
7275 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7276 else if (op == 0x3)
7277 /* Not really supported. */
7278 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7279 /* Fall through. */
7280
7281 default:
7282 return arm_copy_undef (gdbarch, insn, dsc);
7283 }
7284 }
7285
7286 static int
7287 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7288 struct regcache *regs,
7289 arm_displaced_step_copy_insn_closure *dsc)
7290 {
7291 if (bit (insn, 25))
7292 switch (bits (insn, 20, 24))
7293 {
7294 case 0x10:
7295 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7296
7297 case 0x14:
7298 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7299
7300 case 0x12: case 0x16:
7301 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7302
7303 default:
7304 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7305 }
7306 else
7307 {
7308 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7309
7310 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7311 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7312 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7313 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7314 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7315 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7316 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7317 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7318 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7319 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7320 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7321 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7322 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7323 /* 2nd arg means "unprivileged". */
7324 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7325 dsc);
7326 }
7327
7328 /* Should be unreachable. */
7329 return 1;
7330 }
7331
7332 static int
7333 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7334 struct regcache *regs,
7335 arm_displaced_step_copy_insn_closure *dsc)
7336 {
7337 int a = bit (insn, 25), b = bit (insn, 4);
7338 uint32_t op1 = bits (insn, 20, 24);
7339
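  /* The trailing three arguments to arm_copy_ldr_str_ldrb_strb below appear
     to select load vs. store, the transfer size in bytes, and the user-mode
     ("t") variant, respectively; e.g. (1, 4, 0) is a plain LDR and
     (0, 1, 1) is STRBT.  */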
7340 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7341 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7342 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7343 else if ((!a && (op1 & 0x17) == 0x02)
7344 || (a && (op1 & 0x17) == 0x02 && !b))
7345 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7346 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7347 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7348 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7349 else if ((!a && (op1 & 0x17) == 0x03)
7350 || (a && (op1 & 0x17) == 0x03 && !b))
7351 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7352 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7353 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7354 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7355 else if ((!a && (op1 & 0x17) == 0x06)
7356 || (a && (op1 & 0x17) == 0x06 && !b))
7357 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7358 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7359 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7360 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7361 else if ((!a && (op1 & 0x17) == 0x07)
7362 || (a && (op1 & 0x17) == 0x07 && !b))
7363 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7364
7365 /* Should be unreachable. */
7366 return 1;
7367 }
7368
7369 static int
7370 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7371 arm_displaced_step_copy_insn_closure *dsc)
7372 {
7373 switch (bits (insn, 20, 24))
7374 {
7375 case 0x00: case 0x01: case 0x02: case 0x03:
7376 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7377
7378 case 0x04: case 0x05: case 0x06: case 0x07:
7379 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7380
7381 case 0x08: case 0x09: case 0x0a: case 0x0b:
7382 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7383 return arm_copy_unmodified (gdbarch, insn,
7384 "decode/pack/unpack/saturate/reverse", dsc);
7385
7386 case 0x18:
7387 if (bits (insn, 5, 7) == 0) /* op2. */
7388 {
7389 if (bits (insn, 12, 15) == 0xf)
7390 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7391 else
7392 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7393 }
7394 else
7395 return arm_copy_undef (gdbarch, insn, dsc);
7396
7397 case 0x1a: case 0x1b:
7398 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7399 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7400 else
7401 return arm_copy_undef (gdbarch, insn, dsc);
7402
7403 case 0x1c: case 0x1d:
7404 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7405 {
7406 if (bits (insn, 0, 3) == 0xf)
7407 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7408 else
7409 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7410 }
7411 else
7412 return arm_copy_undef (gdbarch, insn, dsc);
7413
7414 case 0x1e: case 0x1f:
7415 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7416 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7417 else
7418 return arm_copy_undef (gdbarch, insn, dsc);
7419 }
7420
7421 /* Should be unreachable. */
7422 return 1;
7423 }
7424
7425 static int
7426 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7427 struct regcache *regs,
7428 arm_displaced_step_copy_insn_closure *dsc)
7429 {
7430 if (bit (insn, 25))
7431 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7432 else
7433 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7434 }
7435
7436 static int
7437 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7438 struct regcache *regs,
7439 arm_displaced_step_copy_insn_closure *dsc)
7440 {
7441 unsigned int opcode = bits (insn, 20, 24);
7442
7443 switch (opcode)
7444 {
7445 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7446 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7447
7448 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7449 case 0x12: case 0x16:
7450 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7451
7452 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7453 case 0x13: case 0x17:
7454 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7455
7456 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7457 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7458 /* Note: no writeback for these instructions. Bit 25 will always be
7459 zero though (via caller), so the following works OK. */
7460 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7461 }
7462
7463 /* Should be unreachable. */
7464 return 1;
7465 }
7466
7467 /* Decode shifted register instructions. */
7468
7469 static int
7470 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7471 uint16_t insn2, struct regcache *regs,
7472 arm_displaced_step_copy_insn_closure *dsc)
7473 {
7474 /* PC is only allowed to be used in the MOV instruction. */
7475
7476 unsigned int op = bits (insn1, 5, 8);
7477 unsigned int rn = bits (insn1, 0, 3);
7478
7479 if (op == 0x2 && rn == 0xf) /* MOV */
7480 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7481 else
7482 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7483 "dp (shift reg)", dsc);
7484 }
7485
7486
7487 /* Decode extension register load/store. Exactly the same as
7488 arm_decode_ext_reg_ld_st. */
7489
7490 static int
7491 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7492 uint16_t insn2, struct regcache *regs,
7493 arm_displaced_step_copy_insn_closure *dsc)
7494 {
7495 unsigned int opcode = bits (insn1, 4, 8);
7496
7497 switch (opcode)
7498 {
7499 case 0x04: case 0x05:
7500 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7501 "vfp/neon vmov", dsc);
7502
7503 case 0x08: case 0x0c: /* 01x00 */
7504 case 0x0a: case 0x0e: /* 01x10 */
7505 case 0x12: case 0x16: /* 10x10 */
7506 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7507 "vfp/neon vstm/vpush", dsc);
7508
7509 case 0x09: case 0x0d: /* 01x01 */
7510 case 0x0b: case 0x0f: /* 01x11 */
7511 case 0x13: case 0x17: /* 10x11 */
7512 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7513 "vfp/neon vldm/vpop", dsc);
7514
7515 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7516 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7517 "vstr", dsc);
7518 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7519 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7520 }
7521
7522 /* Should be unreachable. */
7523 return 1;
7524 }
7525
7526 static int
7527 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7528 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7529 {
7530 unsigned int op1 = bits (insn, 20, 25);
7531 int op = bit (insn, 4);
7532 unsigned int coproc = bits (insn, 8, 11);
7533
7534 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7535 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7536 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7537 && (coproc & 0xe) != 0xa)
7538 /* stc/stc2. */
7539 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7540 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7541 && (coproc & 0xe) != 0xa)
7542 /* ldc/ldc2 imm/lit. */
7543 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7544 else if ((op1 & 0x3e) == 0x00)
7545 return arm_copy_undef (gdbarch, insn, dsc);
7546 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7547 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7548 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7549 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7550 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7551 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7552 else if ((op1 & 0x30) == 0x20 && !op)
7553 {
7554 if ((coproc & 0xe) == 0xa)
7555 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7556 else
7557 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7558 }
7559 else if ((op1 & 0x30) == 0x20 && op)
7560 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7561 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7562 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7563 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7564 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7565 else if ((op1 & 0x30) == 0x30)
7566 return arm_copy_svc (gdbarch, insn, regs, dsc);
7567 else
7568 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7569 }
7570
7571 static int
7572 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7573 uint16_t insn2, struct regcache *regs,
7574 arm_displaced_step_copy_insn_closure *dsc)
7575 {
7576 unsigned int coproc = bits (insn2, 8, 11);
7577 unsigned int bit_5_8 = bits (insn1, 5, 8);
7578 unsigned int bit_9 = bit (insn1, 9);
7579 unsigned int bit_4 = bit (insn1, 4);
7580
7581 if (bit_9 == 0)
7582 {
7583 if (bit_5_8 == 2)
7584 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7585 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7586 dsc);
7587 else if (bit_5_8 == 0) /* UNDEFINED. */
7588 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7589 else
7590 {
7591 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
7592 if ((coproc & 0xe) == 0xa)
7593 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7594 dsc);
7595 else /* coproc is not 101x. */
7596 {
7597 if (bit_4 == 0) /* STC/STC2. */
7598 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7599 "stc/stc2", dsc);
7600 else /* LDC/LDC2 {literal, immediate}. */
7601 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7602 regs, dsc);
7603 }
7604 }
7605 }
7606 else
7607 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7608
7609 return 0;
7610 }
7611
7612 static void
7613 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7614 arm_displaced_step_copy_insn_closure *dsc, int rd)
7615 {
7616 /* ADR Rd, #imm
7617
7618 Rewrite as:
7619
7620 Preparation: Rd <- PC
7621 Insn: ADD Rd, #imm
7622 Cleanup: Null.
7623 */
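  /* As a sketch of the effect: for an ADR copied to the scratch pad,
     displaced_read_reg returns the architectural PC value of the original
     location (FROM plus the pipeline offset, 8 in ARM state and 4 in Thumb
     state), so the ADD executed in the scratch pad produces the same
     address the original ADR would have computed.  */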
7624
7625 /* Rd <- PC */
7626 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7627 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7628 }
7629
7630 static int
7631 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7632 arm_displaced_step_copy_insn_closure *dsc,
7633 int rd, unsigned int imm)
7634 {
7635
7636 /* Encoding T2: ADDS Rd, #imm */
7637 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7638
7639 install_pc_relative (gdbarch, regs, dsc, rd);
7640
7641 return 0;
7642 }
7643
7644 static int
7645 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7646 struct regcache *regs,
7647 arm_displaced_step_copy_insn_closure *dsc)
7648 {
7649 unsigned int rd = bits (insn, 8, 10);
7650 unsigned int imm8 = bits (insn, 0, 7);
7651
7652 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7653 rd, imm8, insn);
7654
7655 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7656 }
7657
7658 static int
7659 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7660 uint16_t insn2, struct regcache *regs,
7661 arm_displaced_step_copy_insn_closure *dsc)
7662 {
7663 unsigned int rd = bits (insn2, 8, 11);
7664 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7665 extract the raw immediate encoding rather than computing the immediate.
7666 When generating the ADD or SUB instruction, we can simply OR the
7667 immediate into the encoding. */
7668 unsigned int imm_3_8 = insn2 & 0x70ff;
7669 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
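  /* The 32-bit immediate is laid out as i:imm3:imm8, with i in bit 10 of
     the first halfword and imm3:imm8 in bits 14:12 and 7:0 of the second
     halfword; hence the 0x0400 and 0x70ff masks above.  */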
7670
7671 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7672 rd, imm_i, imm_3_8, insn1, insn2);
7673
7674 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form). */
7675 {
7676 /* Rewrite as SUB Rd, Rd, #imm (encoding T3). */
7677 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7678 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7679 }
7680 else /* ADR encoding T3 (ADD form). */
7681 {
7682 /* Rewrite as ADD Rd, Rd, #imm (encoding T3). */
7683 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7684 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7685 }
7686 dsc->numinsns = 2;
7687
7688 install_pc_relative (gdbarch, regs, dsc, rd);
7689
7690 return 0;
7691 }
7692
7693 static int
7694 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7695 struct regcache *regs,
7696 arm_displaced_step_copy_insn_closure *dsc)
7697 {
7698 unsigned int rt = bits (insn1, 8, 10);
7699 unsigned int pc;
7700 int imm8 = (bits (insn1, 0, 7) << 2);
7701
7702 /* LDR Rd, [PC, #imm8]
7703
7704 Rewrite as:
7705
7706 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7707
7708 Insn: LDR R0, [R2, R3];
7709 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7710
7711 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7712
7713 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7714 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7715 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7716 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7717 /* The assembler calculates the required value of the offset from the
7718 Align(PC,4) value of this instruction to the label. */
7719 pc = pc & 0xfffffffc;
7720
7721 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7722 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7723
7724 dsc->rd = rt;
7725 dsc->u.ldst.xfersize = 4;
7726 dsc->u.ldst.rn = 0;
7727 dsc->u.ldst.immed = 0;
7728 dsc->u.ldst.writeback = 0;
7729 dsc->u.ldst.restore_r4 = 0;
7730
7731 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7732
7733 dsc->cleanup = &cleanup_load;
7734
7735 return 0;
7736 }
7737
7738 /* Copy Thumb cbnz/cbz instruction. */
7739
7740 static int
7741 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7742 struct regcache *regs,
7743 arm_displaced_step_copy_insn_closure *dsc)
7744 {
7745 int non_zero = bit (insn1, 11);
7746 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7747 CORE_ADDR from = dsc->insn_addr;
7748 int rn = bits (insn1, 0, 2);
7749 int rn_val = displaced_read_reg (regs, dsc, rn);
7750
7751 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7752 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7753 true, set it to INST_AL so cleanup_branch knows the branch is taken;
7754 otherwise it is false and cleanup_branch will do nothing. */
7755 if (dsc->u.branch.cond)
7756 {
7757 dsc->u.branch.cond = INST_AL;
7758 dsc->u.branch.dest = from + 4 + imm5;
7759 }
7760 else
7761 dsc->u.branch.dest = from + 2;
7762
7763 dsc->u.branch.link = 0;
7764 dsc->u.branch.exchange = 0;
7765
7766 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7767 non_zero ? "cbnz" : "cbz",
7768 rn, rn_val, insn1, dsc->u.branch.dest);
7769
7770 dsc->modinsn[0] = THUMB_NOP;
7771
7772 dsc->cleanup = &cleanup_branch;
7773 return 0;
7774 }
7775
7776 /* Copy Table Branch Byte/Halfword (TBB/TBH). */
7777 static int
7778 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7779 uint16_t insn2, struct regcache *regs,
7780 arm_displaced_step_copy_insn_closure *dsc)
7781 {
7782 ULONGEST rn_val, rm_val;
7783 int is_tbh = bit (insn2, 4);
7784 CORE_ADDR halfwords = 0;
7785 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7786
7787 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7788 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7789
7790 if (is_tbh)
7791 {
7792 gdb_byte buf[2];
7793
7794 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7795 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7796 }
7797 else
7798 {
7799 gdb_byte buf[1];
7800
7801 target_read_memory (rn_val + rm_val, buf, 1);
7802 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7803 }
7804
7805 displaced_debug_printf ("%s base 0x%x offset 0x%x halfwords 0x%x",
7806 is_tbh ? "tbh" : "tbb",
7807 (unsigned int) rn_val, (unsigned int) rm_val,
7808 (unsigned int) halfwords);
7809
7810 dsc->u.branch.cond = INST_AL;
7811 dsc->u.branch.link = 0;
7812 dsc->u.branch.exchange = 0;
7813 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
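  /* TBB/TBH branch forward by twice the loaded table entry, relative to
     the PC of the table-branch instruction (insn_addr + 4 in Thumb state),
     which is what the destination computed above encodes.  */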
7814
7815 dsc->cleanup = &cleanup_branch;
7816
7817 return 0;
7818 }
7819
7820 static void
7821 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7822 arm_displaced_step_copy_insn_closure *dsc)
7823 {
7824 /* PC <- r7 */
7825 int val = displaced_read_reg (regs, dsc, 7);
7826 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7827
7828 /* r7 <- r8 */
7829 val = displaced_read_reg (regs, dsc, 8);
7830 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7831
7832 /* r8 <- tmp[0] */
7833 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7834
7835 }
7836
7837 static int
7838 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7839 struct regcache *regs,
7840 arm_displaced_step_copy_insn_closure *dsc)
7841 {
7842 dsc->u.block.regmask = insn1 & 0x00ff;
7843
7844 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7845 to:
7846
7847 (1) register list is full, that is, r0-r7 are used.
7848 Prepare: tmp[0] <- r8
7849
7850 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7851 MOV r8, r7; Move value of r7 to r8;
7852 POP {r7}; Store PC value into r7.
7853
7854 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7855
7856 (2) register list is not full, supposing there are N registers in
7857 register list (except PC, 0 <= N <= 7).
7858 Prepare: for each i, 0 - N, tmp[i] <- ri.
7859
7860 POP {r0, r1, ...., rN};
7861
7862 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7863 from tmp[] properly.
7864 */
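  /* A worked example of case (2), as a sketch: POP {r0, r2, pc} gives
     regmask 0x05 here; num_in_list is 2, so the executed copy becomes
     POP {r0, r1, r2} (new_regmask 0x7), and cleanup_block_load_pc then
     distributes the three loaded words to r0, r2 and the PC as described
     above.  */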
7865 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7866 dsc->u.block.regmask, insn1);
7867
7868 if (dsc->u.block.regmask == 0xff)
7869 {
7870 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7871
7872 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7873 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7874 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7875
7876 dsc->numinsns = 3;
7877 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7878 }
7879 else
7880 {
7881 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7882 unsigned int i;
7883 unsigned int new_regmask;
7884
7885 for (i = 0; i < num_in_list + 1; i++)
7886 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7887
7888 new_regmask = (1 << (num_in_list + 1)) - 1;
7889
7890 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7891 "modified list %.4x",
7892 (int) dsc->u.block.regmask, new_regmask);
7893
7894 dsc->u.block.regmask |= 0x8000;
7895 dsc->u.block.writeback = 0;
7896 dsc->u.block.cond = INST_AL;
7897
7898 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7899
7900 dsc->cleanup = &cleanup_block_load_pc;
7901 }
7902
7903 return 0;
7904 }
7905
7906 static void
7907 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7908 struct regcache *regs,
7909 arm_displaced_step_copy_insn_closure *dsc)
7910 {
7911 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7912 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7913 int err = 0;
7914
7915 /* 16-bit thumb instructions. */
7916 switch (op_bit_12_15)
7917 {
7918 /* Shift (immediate), add, subtract, move and compare. */
7919 case 0: case 1: case 2: case 3:
7920 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7921 "shift/add/sub/mov/cmp",
7922 dsc);
7923 break;
7924 case 4:
7925 switch (op_bit_10_11)
7926 {
7927 case 0: /* Data-processing */
7928 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7929 "data-processing",
7930 dsc);
7931 break;
7932 case 1: /* Special data instructions and branch and exchange. */
7933 {
7934 unsigned short op = bits (insn1, 7, 9);
7935 if (op == 6 || op == 7) /* BX or BLX */
7936 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7937 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7938 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7939 else
7940 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7941 dsc);
7942 }
7943 break;
7944 default: /* LDR (literal) */
7945 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7946 }
7947 break;
7948 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7949 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7950 break;
7951 case 10:
7952 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7953 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7954 else /* Generate SP-relative address */
7955 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7956 break;
7957 case 11: /* Misc 16-bit instructions */
7958 {
7959 switch (bits (insn1, 8, 11))
7960 {
7961 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7962 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7963 break;
7964 case 12: case 13: /* POP */
7965 if (bit (insn1, 8)) /* PC is in register list. */
7966 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7967 else
7968 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7969 break;
7970 case 15: /* If-Then, and hints */
7971 if (bits (insn1, 0, 3))
7972 /* If-Then makes up to four following instructions conditional.
7973 The IT instruction itself is not conditional, so handle it as an
7974 ordinary unmodified instruction. */
7975 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7976 dsc);
7977 else
7978 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7979 break;
7980 default:
7981 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7982 }
7983 }
7984 break;
7985 case 12:
7986 if (op_bit_10_11 < 2) /* Store multiple registers */
7987 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7988 else /* Load multiple registers */
7989 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7990 break;
7991 case 13: /* Conditional branch and supervisor call */
7992 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7993 err = thumb_copy_b (gdbarch, insn1, dsc);
7994 else
7995 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7996 break;
7997 case 14: /* Unconditional branch */
7998 err = thumb_copy_b (gdbarch, insn1, dsc);
7999 break;
8000 default:
8001 err = 1;
8002 }
8003
8004 if (err)
8005 internal_error (__FILE__, __LINE__,
8006 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8007 }
8008
8009 static int
8010 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8011 uint16_t insn1, uint16_t insn2,
8012 struct regcache *regs,
8013 arm_displaced_step_copy_insn_closure *dsc)
8014 {
8015 int rt = bits (insn2, 12, 15);
8016 int rn = bits (insn1, 0, 3);
8017 int op1 = bits (insn1, 7, 8);
8018
8019 switch (bits (insn1, 5, 6))
8020 {
8021 case 0: /* Load byte and memory hints */
8022 if (rt == 0xf) /* PLD/PLI */
8023 {
8024 if (rn == 0xf)
8025 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8026 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8027 else
8028 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8029 "pli/pld", dsc);
8030 }
8031 else
8032 {
8033 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8034 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8035 1);
8036 else
8037 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8038 "ldrb{reg, immediate}/ldrbt",
8039 dsc);
8040 }
8041
8042 break;
8043 case 1: /* Load halfword and memory hints. */
8044 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8045 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8046 "pld/unalloc memhint", dsc);
8047 else
8048 {
8049 if (rn == 0xf)
8050 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8051 2);
8052 else
8053 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8054 "ldrh/ldrht", dsc);
8055 }
8056 break;
8057 case 2: /* Load word */
8058 {
8059 int insn2_bit_8_11 = bits (insn2, 8, 11);
8060
8061 if (rn == 0xf)
8062 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8063 else if (op1 == 0x1) /* Encoding T3 */
8064 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8065 0, 1);
8066 else /* op1 == 0x0 */
8067 {
8068 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8069 /* LDR (immediate) */
8070 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8071 dsc, bit (insn2, 8), 1);
8072 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8073 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8074 "ldrt", dsc);
8075 else
8076 /* LDR (register) */
8077 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8078 dsc, 0, 0);
8079 }
8080 break;
8081 }
8082 default:
8083 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8084 break;
8085 }
8086 return 0;
8087 }
8088
8089 static void
8090 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8091 uint16_t insn2, struct regcache *regs,
8092 arm_displaced_step_copy_insn_closure *dsc)
8093 {
8094 int err = 0;
8095 unsigned short op = bit (insn2, 15);
8096 unsigned int op1 = bits (insn1, 11, 12);
8097
8098 switch (op1)
8099 {
8100 case 1:
8101 {
8102 switch (bits (insn1, 9, 10))
8103 {
8104 case 0:
8105 if (bit (insn1, 6))
8106 {
8107 /* Load/store {dual, exclusive}, table branch. */
8108 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8109 && bits (insn2, 5, 7) == 0)
8110 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8111 dsc);
8112 else
8113 /* PC is not allowed to be used in load/store {dual, exclusive}
8114 instructions. */
8115 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8116 "load/store dual/ex", dsc);
8117 }
8118 else /* load/store multiple */
8119 {
8120 switch (bits (insn1, 7, 8))
8121 {
8122 case 0: case 3: /* SRS, RFE */
8123 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8124 "srs/rfe", dsc);
8125 break;
8126 case 1: case 2: /* LDM/STM/PUSH/POP */
8127 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8128 break;
8129 }
8130 }
8131 break;
8132
8133 case 1:
8134 /* Data-processing (shift register). */
8135 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8136 dsc);
8137 break;
8138 default: /* Coprocessor instructions. */
8139 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8140 break;
8141 }
8142 break;
8143 }
8144 case 2: /* op1 = 2 */
8145 if (op) /* Branch and misc control. */
8146 {
8147 if (bit (insn2, 14) /* BLX/BL */
8148 || bit (insn2, 12) /* Unconditional branch */
8149 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8150 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8151 else
8152 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8153 "misc ctrl", dsc);
8154 }
8155 else
8156 {
8157 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8158 {
8159 int dp_op = bits (insn1, 4, 8);
8160 int rn = bits (insn1, 0, 3);
8161 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8162 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8163 regs, dsc);
8164 else
8165 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8166 "dp/pb", dsc);
8167 }
8168 else /* Data processing (modified immediate) */
8169 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8170 "dp/mi", dsc);
8171 }
8172 break;
8173 case 3: /* op1 = 3 */
8174 switch (bits (insn1, 9, 10))
8175 {
8176 case 0:
8177 if (bit (insn1, 4))
8178 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8179 regs, dsc);
8180 else /* NEON Load/Store and Store single data item */
8181 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8182 "neon elt/struct load/store",
8183 dsc);
8184 break;
8185 case 1: /* op1 = 3, bits (9, 10) == 1 */
8186 switch (bits (insn1, 7, 8))
8187 {
8188 case 0: case 1: /* Data processing (register) */
8189 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8190 "dp(reg)", dsc);
8191 break;
8192 case 2: /* Multiply and absolute difference */
8193 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8194 "mul/mua/diff", dsc);
8195 break;
8196 case 3: /* Long multiply and divide */
8197 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8198 "lmul/lmua", dsc);
8199 break;
8200 }
8201 break;
8202 default: /* Coprocessor instructions */
8203 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8204 break;
8205 }
8206 break;
8207 default:
8208 err = 1;
8209 }
8210
8211 if (err)
8212 internal_error (__FILE__, __LINE__,
8213 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8214
8215 }
8216
8217 static void
8218 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8219 struct regcache *regs,
8220 arm_displaced_step_copy_insn_closure *dsc)
8221 {
8222 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8223 uint16_t insn1
8224 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8225
8226 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8227 insn1, (unsigned long) from);
8228
8229 dsc->is_thumb = 1;
8230 dsc->insn_size = thumb_insn_size (insn1);
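  /* thumb_insn_size reports 4 for the 32-bit Thumb encodings, i.e. when
     the leading halfword starts with 0b11101, 0b11110 or 0b11111;
     everything else is a 16-bit instruction.  */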
8231 if (thumb_insn_size (insn1) == 4)
8232 {
8233 uint16_t insn2
8234 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8235 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8236 }
8237 else
8238 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8239 }
8240
8241 void
8242 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8243 CORE_ADDR to, struct regcache *regs,
8244 arm_displaced_step_copy_insn_closure *dsc)
8245 {
8246 int err = 0;
8247 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8248 uint32_t insn;
8249
8250 /* Most displaced instructions use a 1-instruction scratch space, so set this
8251 here and override below if/when necessary. */
8252 dsc->numinsns = 1;
8253 dsc->insn_addr = from;
8254 dsc->scratch_base = to;
8255 dsc->cleanup = NULL;
8256 dsc->wrote_to_pc = 0;
8257
8258 if (!displaced_in_arm_mode (regs))
8259 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8260
8261 dsc->is_thumb = 0;
8262 dsc->insn_size = 4;
8263 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8264 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8265 (unsigned long) insn, (unsigned long) from);
8266
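  /* Unconditional (0xFxxxxxxx) instructions get their own decoder below;
     otherwise dispatch on a key built from insn bits 27:25 (key bits 3:1)
     and insn bit 4 (key bit 0), roughly following the top-level encoding
     table in the ARM ARM.  */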
8267 if ((insn & 0xf0000000) == 0xf0000000)
8268 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8269 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8270 {
8271 case 0x0: case 0x1: case 0x2: case 0x3:
8272 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8273 break;
8274
8275 case 0x4: case 0x5: case 0x6:
8276 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8277 break;
8278
8279 case 0x7:
8280 err = arm_decode_media (gdbarch, insn, dsc);
8281 break;
8282
8283 case 0x8: case 0x9: case 0xa: case 0xb:
8284 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8285 break;
8286
8287 case 0xc: case 0xd: case 0xe: case 0xf:
8288 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8289 break;
8290 }
8291
8292 if (err)
8293 internal_error (__FILE__, __LINE__,
8294 _("arm_process_displaced_insn: Instruction decode error"));
8295 }
8296
8297 /* Actually set up the scratch space for a displaced instruction. */
8298
8299 void
8300 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8301 CORE_ADDR to,
8302 arm_displaced_step_copy_insn_closure *dsc)
8303 {
8304 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8305 unsigned int i, len, offset;
8306 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8307 int size = dsc->is_thumb? 2 : 4;
8308 const gdb_byte *bkp_insn;
8309
8310 offset = 0;
8311 /* Poke modified instruction(s). */
8312 for (i = 0; i < dsc->numinsns; i++)
8313 {
8314 if (size == 4)
8315 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8316 dsc->modinsn[i], (unsigned long) to + offset);
8317 else if (size == 2)
8318 displaced_debug_printf ("writing insn %.4x at %.8lx",
8319 (unsigned short) dsc->modinsn[i],
8320 (unsigned long) to + offset);
8321
8322 write_memory_unsigned_integer (to + offset, size,
8323 byte_order_for_code,
8324 dsc->modinsn[i]);
8325 offset += size;
8326 }
8327
8328 /* Choose the correct breakpoint instruction. */
8329 if (dsc->is_thumb)
8330 {
8331 bkp_insn = tdep->thumb_breakpoint;
8332 len = tdep->thumb_breakpoint_size;
8333 }
8334 else
8335 {
8336 bkp_insn = tdep->arm_breakpoint;
8337 len = tdep->arm_breakpoint_size;
8338 }
8339
8340 /* Put breakpoint afterwards. */
8341 write_memory (to + offset, bkp_insn, len);
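  /* The scratch space now contains the modified instruction(s) followed by
     a breakpoint, so single-stepping the copy stops GDB right after the
     displaced instruction has executed.  */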
8342
8343 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8344 paddress (gdbarch, to));
8345 }
8346
8347 /* Entry point for cleaning things up after a displaced instruction has been
8348 single-stepped. */
8349
8350 void
8351 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8352 struct displaced_step_copy_insn_closure *dsc_,
8353 CORE_ADDR from, CORE_ADDR to,
8354 struct regcache *regs)
8355 {
8356 arm_displaced_step_copy_insn_closure *dsc
8357 = (arm_displaced_step_copy_insn_closure *) dsc_;
8358
8359 if (dsc->cleanup)
8360 dsc->cleanup (gdbarch, regs, dsc);
8361
8362 if (!dsc->wrote_to_pc)
8363 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8364 dsc->insn_addr + dsc->insn_size);
8365
8366 }
8367
8368 #include "bfd-in2.h"
8369 #include "libcoff.h"
8370
8371 static int
8372 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8373 {
8374 gdb_disassemble_info *di
8375 = static_cast<gdb_disassemble_info *> (info->application_data);
8376 struct gdbarch *gdbarch = di->arch ();
8377
8378 if (arm_pc_is_thumb (gdbarch, memaddr))
8379 {
8380 static asymbol *asym;
8381 static combined_entry_type ce;
8382 static struct coff_symbol_struct csym;
8383 static struct bfd fake_bfd;
8384 static bfd_target fake_target;
8385
8386 if (csym.native == NULL)
8387 {
8388 /* Create a fake symbol vector containing a Thumb symbol.
8389 This is solely so that the code in print_insn_little_arm()
8390 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8391 the presence of a Thumb symbol and switch to decoding
8392 Thumb instructions. */
8393
8394 fake_target.flavour = bfd_target_coff_flavour;
8395 fake_bfd.xvec = &fake_target;
8396 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8397 csym.native = &ce;
8398 csym.symbol.the_bfd = &fake_bfd;
8399 csym.symbol.name = "fake";
8400 asym = (asymbol *) & csym;
8401 }
8402
8403 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8404 info->symbols = &asym;
8405 }
8406 else
8407 info->symbols = NULL;
8408
8409 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8410 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
8411 opcodes/arm-dis.c:print_insn would reset info->mach, which would
8412 trigger the assert on the mismatch of info->mach and
8413 bfd_get_mach (current_program_space->exec_bfd ()) in
8414 default_print_insn. */
8415 if (current_program_space->exec_bfd () != NULL
8416 && (current_program_space->exec_bfd ()->arch_info
8417 == gdbarch_bfd_arch_info (gdbarch)))
8418 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8419
8420 return default_print_insn (memaddr, info);
8421 }
8422
8423 /* The following define instruction sequences that will cause ARM
8424 CPUs to take an undefined instruction trap. These are used to
8425 signal a breakpoint to GDB.
8426
8427 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8428 modes. A different instruction is required for each mode. The ARM
8429 CPUs can also be big or little endian. Thus four different
8430 instructions are needed to support all cases.
8431
8432 Note: ARMv4 defines several new instructions that will take the
8433 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8434 not in fact add the new instructions. The new undefined
8435 instructions in ARMv4 are all instructions that had no defined
8436 behaviour in earlier chips. There is no guarantee that they will
8437 raise an exception; they may be treated as NOPs. In practice, it
8438 may only be safe to rely on instructions matching:
8439
8440 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8441 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8442 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8443
8444 Even this may only be true if the condition predicate is true. The
8445 following use a condition predicate of ALWAYS so it is always TRUE.
8446
8447 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8448 and NetBSD all use a software interrupt rather than an undefined
8449 instruction to force a trap. This can be handled by the
8450 ABI-specific code during establishment of the gdbarch vector. */
8451
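/* For reference, the ARM breakpoint below encodes the word 0xE7FFDEFE in
   both byte orders; that word falls inside the always-undefined pattern
   described above (condition ALWAYS, bits 27:25 = 011, bit 4 = 1).  */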
8452 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8453 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8454 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8455 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8456
8457 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8458 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8459 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8460 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8461
8462 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8463
8464 static int
8465 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8466 {
8467 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8468 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8469
8470 if (arm_pc_is_thumb (gdbarch, *pcptr))
8471 {
8472 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8473
8474 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8475 check whether we are replacing a 32-bit instruction. */
8476 if (tdep->thumb2_breakpoint != NULL)
8477 {
8478 gdb_byte buf[2];
8479
8480 if (target_read_memory (*pcptr, buf, 2) == 0)
8481 {
8482 unsigned short inst1;
8483
8484 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8485 if (thumb_insn_size (inst1) == 4)
8486 return ARM_BP_KIND_THUMB2;
8487 }
8488 }
8489
8490 return ARM_BP_KIND_THUMB;
8491 }
8492 else
8493 return ARM_BP_KIND_ARM;
8494
8495 }
8496
8497 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8498
8499 static const gdb_byte *
8500 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8501 {
8502 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8503
8504 switch (kind)
8505 {
8506 case ARM_BP_KIND_ARM:
8507 *size = tdep->arm_breakpoint_size;
8508 return tdep->arm_breakpoint;
8509 case ARM_BP_KIND_THUMB:
8510 *size = tdep->thumb_breakpoint_size;
8511 return tdep->thumb_breakpoint;
8512 case ARM_BP_KIND_THUMB2:
8513 *size = tdep->thumb2_breakpoint_size;
8514 return tdep->thumb2_breakpoint;
8515 default:
8516 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8517 }
8518 }
8519
8520 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8521
8522 static int
8523 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8524 struct regcache *regcache,
8525 CORE_ADDR *pcptr)
8526 {
8527 gdb_byte buf[4];
8528
8529 /* Check that the memory pointed to by PC is readable. */
8530 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8531 {
8532 struct arm_get_next_pcs next_pcs_ctx;
8533
8534 arm_get_next_pcs_ctor (&next_pcs_ctx,
8535 &arm_get_next_pcs_ops,
8536 gdbarch_byte_order (gdbarch),
8537 gdbarch_byte_order_for_code (gdbarch),
8538 0,
8539 regcache);
8540
8541 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8542
8543 /* If *PCPTR matches one of the possible next PCs of the current
8544 instruction, use the software single-step computation and determine
8545 the Thumb mode from the destination address. */
8546 for (CORE_ADDR pc : next_pcs)
8547 {
8548 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8549 {
8550 if (IS_THUMB_ADDR (pc))
8551 {
8552 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8553 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8554 }
8555 else
8556 return ARM_BP_KIND_ARM;
8557 }
8558 }
8559 }
8560
8561 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8562 }
8563
8564 /* Extract from the register cache REGS a function return value
8565 of type TYPE, and copy that, in virtual format, into
8566 VALBUF. */
8567
8568 static void
8569 arm_extract_return_value (struct type *type, struct regcache *regs,
8570 gdb_byte *valbuf)
8571 {
8572 struct gdbarch *gdbarch = regs->arch ();
8573 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8574 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8575
8576 if (TYPE_CODE_FLT == type->code ())
8577 {
8578 switch (tdep->fp_model)
8579 {
8580 case ARM_FLOAT_FPA:
8581 {
8582 /* The value is in register F0 in internal format. We need to
8583 extract the raw value and then convert it to the desired
8584 internal type. */
8585 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8586
8587 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8588 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8589 valbuf, type);
8590 }
8591 break;
8592
8593 case ARM_FLOAT_SOFT_FPA:
8594 case ARM_FLOAT_SOFT_VFP:
8595 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8596 not using the VFP ABI code. */
8597 case ARM_FLOAT_VFP:
8598 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8599 if (TYPE_LENGTH (type) > 4)
8600 regs->cooked_read (ARM_A1_REGNUM + 1,
8601 valbuf + ARM_INT_REGISTER_SIZE);
8602 break;
8603
8604 default:
8605 internal_error (__FILE__, __LINE__,
8606 _("arm_extract_return_value: "
8607 "Floating point model not supported"));
8608 break;
8609 }
8610 }
8611 else if (type->code () == TYPE_CODE_INT
8612 || type->code () == TYPE_CODE_CHAR
8613 || type->code () == TYPE_CODE_BOOL
8614 || type->code () == TYPE_CODE_PTR
8615 || TYPE_IS_REFERENCE (type)
8616 || type->code () == TYPE_CODE_ENUM
8617 || is_fixed_point_type (type))
8618 {
8619 /* If the type is a plain integer, then the access is
8620 straightforward. Otherwise we have to play around a bit
8621 more. */
8622 int len = TYPE_LENGTH (type);
8623 int regno = ARM_A1_REGNUM;
8624 ULONGEST tmp;
8625
8626 while (len > 0)
8627 {
8628 /* By using store_unsigned_integer we avoid having to do
8629 anything special for small big-endian values. */
8630 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8631 store_unsigned_integer (valbuf,
8632 (len > ARM_INT_REGISTER_SIZE
8633 ? ARM_INT_REGISTER_SIZE : len),
8634 byte_order, tmp);
8635 len -= ARM_INT_REGISTER_SIZE;
8636 valbuf += ARM_INT_REGISTER_SIZE;
8637 }
8638 }
8639 else
8640 {
8641 /* For a structure or union the behaviour is as if the value had
8642 been stored to word-aligned memory and then loaded into
8643 registers with 32-bit load instruction(s). */
8644 int len = TYPE_LENGTH (type);
8645 int regno = ARM_A1_REGNUM;
8646 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8647
8648 while (len > 0)
8649 {
8650 regs->cooked_read (regno++, tmpbuf);
8651 memcpy (valbuf, tmpbuf,
8652 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8653 len -= ARM_INT_REGISTER_SIZE;
8654 valbuf += ARM_INT_REGISTER_SIZE;
8655 }
8656 }
8657 }
8658
8659
8660 /* Will a function return an aggregate type in memory or in a
8661 register? Return 0 if an aggregate type can be returned in a
8662 register, 1 if it must be returned in memory. */
8663
8664 static int
8665 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8666 {
8667 enum type_code code;
8668
8669 type = check_typedef (type);
8670
8671 /* Simple, non-aggregate types (i.e. not including vectors and
8672 complex) are always returned in a register (or registers). */
8673 code = type->code ();
8674 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8675 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8676 return 0;
8677
8678 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8679 {
8680 /* Vector values should be returned using ARM registers if they
8681 are not over 16 bytes. */
8682 return (TYPE_LENGTH (type) > 16);
8683 }
8684
8685 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8686 if (tdep->arm_abi != ARM_ABI_APCS)
8687 {
8688 /* The AAPCS says all aggregates not larger than a word are returned
8689 in a register. */
8690 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE
8691 && language_pass_by_reference (type).trivially_copyable)
8692 return 0;
8693
8694 return 1;
8695 }
8696 else
8697 {
8698 int nRc;
8699
8700 /* All aggregate types that won't fit in a register must be returned
8701 in memory. */
8702 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE
8703 || !language_pass_by_reference (type).trivially_copyable)
8704 return 1;
8705
8706 /* In the ARM ABI, "integer" like aggregate types are returned in
8707 registers. For an aggregate type to be integer like, its size
8708 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8709 offset of each addressable subfield must be zero. Note that bit
8710 fields are not addressable, and all addressable subfields of
8711 unions always start at offset zero.
8712
8713 This function is based on the behaviour of GCC 2.95.1.
8714 See: gcc/arm.c: arm_return_in_memory() for details.
8715
8716 Note: All versions of GCC before GCC 2.95.2 do not set up the
8717 parameters correctly for a function returning the following
8718 structure: struct { float f;}; This should be returned in memory,
8719 not a register. Richard Earnshaw sent me a patch, but I do not
8720 know of any way to detect if a function like the above has been
8721 compiled with the correct calling convention. */
8722
8723 /* Assume all other aggregate types can be returned in a register.
8724 Run a check for structures, unions and arrays. */
8725 nRc = 0;
8726
8727 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8728 {
8729 int i;
8730 /* Need to check if this struct/union is "integer" like. For
8731 this to be true, its size must be less than or equal to
8732 ARM_INT_REGISTER_SIZE and the offset of each addressable
8733 subfield must be zero. Note that bit fields are not
8734 addressable, and unions always start at offset zero. If any
8735 of the subfields is a floating point type, the struct/union
8736 cannot be an integer type. */
8737
8738 /* For each field in the object, check:
8739 1) Is it FP? --> yes, nRc = 1;
8740 2) Is it addressable (bitpos != 0) and
8741 not packed (bitsize == 0)?
8742 --> yes, nRc = 1
8743 */
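	 /* For example (illustrative only): under this test a union whose
	    members all start at offset zero, such as union { int i; char c; },
	    stays integer-like and is returned in r0, while struct { char a;
	    char b; } (b at a non-zero offset) or struct { float f; } sets
	    nRc and is returned in memory.  */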
8744
8745 for (i = 0; i < type->num_fields (); i++)
8746 {
8747 enum type_code field_type_code;
8748
8749 field_type_code
8750 = check_typedef (type->field (i).type ())->code ();
8751
8752 /* Is it a floating point type field? */
8753 if (field_type_code == TYPE_CODE_FLT)
8754 {
8755 nRc = 1;
8756 break;
8757 }
8758
8759 /* If bitpos != 0, then we have to care about it. */
8760 if (type->field (i).loc_bitpos () != 0)
8761 {
8762 /* Bitfields are not addressable. If the field bitsize is
8763 zero, then the field is not packed. Hence it cannot be
8764 a bitfield or any other packed type. */
8765 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8766 {
8767 nRc = 1;
8768 break;
8769 }
8770 }
8771 }
8772 }
8773
8774 return nRc;
8775 }
8776 }
8777
8778 /* Write into appropriate registers a function return value of type
8779 TYPE, given in virtual format. */
8780
8781 static void
8782 arm_store_return_value (struct type *type, struct regcache *regs,
8783 const gdb_byte *valbuf)
8784 {
8785 struct gdbarch *gdbarch = regs->arch ();
8786 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8787
8788 if (type->code () == TYPE_CODE_FLT)
8789 {
8790 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8791 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8792
8793 switch (tdep->fp_model)
8794 {
8795 case ARM_FLOAT_FPA:
8796
8797 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8798 regs->cooked_write (ARM_F0_REGNUM, buf);
8799 break;
8800
8801 case ARM_FLOAT_SOFT_FPA:
8802 case ARM_FLOAT_SOFT_VFP:
8803 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8804 not using the VFP ABI code. */
8805 case ARM_FLOAT_VFP:
8806 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8807 if (TYPE_LENGTH (type) > 4)
8808 regs->cooked_write (ARM_A1_REGNUM + 1,
8809 valbuf + ARM_INT_REGISTER_SIZE);
8810 break;
8811
8812 default:
8813 internal_error (__FILE__, __LINE__,
8814 _("arm_store_return_value: Floating "
8815 "point model not supported"));
8816 break;
8817 }
8818 }
8819 else if (type->code () == TYPE_CODE_INT
8820 || type->code () == TYPE_CODE_CHAR
8821 || type->code () == TYPE_CODE_BOOL
8822 || type->code () == TYPE_CODE_PTR
8823 || TYPE_IS_REFERENCE (type)
8824 || type->code () == TYPE_CODE_ENUM)
8825 {
8826 if (TYPE_LENGTH (type) <= 4)
8827 {
8828 /* Values of one word or less are zero/sign-extended and
8829 returned in r0. */
8830 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8831 LONGEST val = unpack_long (type, valbuf);
8832
8833 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8834 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8835 }
8836 else
8837 {
8838 /* Integral values greater than one word are stored in consecutive
8839 registers starting with r0. This will always be a multiple of
8840 the register size. */
8841 int len = TYPE_LENGTH (type);
8842 int regno = ARM_A1_REGNUM;
8843
8844 while (len > 0)
8845 {
8846 regs->cooked_write (regno++, valbuf);
8847 len -= ARM_INT_REGISTER_SIZE;
8848 valbuf += ARM_INT_REGISTER_SIZE;
8849 }
8850 }
8851 }
8852 else
8853 {
8854 /* For a structure or union the behaviour is as if the value had
8855 been stored to word-aligned memory and then loaded into
8856 registers with 32-bit load instruction(s). */
8857 int len = TYPE_LENGTH (type);
8858 int regno = ARM_A1_REGNUM;
8859 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8860
8861 while (len > 0)
8862 {
8863 memcpy (tmpbuf, valbuf,
8864 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8865 regs->cooked_write (regno++, tmpbuf);
8866 len -= ARM_INT_REGISTER_SIZE;
8867 valbuf += ARM_INT_REGISTER_SIZE;
8868 }
8869 }
8870 }
8871
8872
8873 /* Handle function return values. */
8874
8875 static enum return_value_convention
8876 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8877 struct type *valtype, struct regcache *regcache,
8878 gdb_byte *readbuf, const gdb_byte *writebuf)
8879 {
8880 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8881 struct type *func_type = function ? value_type (function) : NULL;
8882 enum arm_vfp_cprc_base_type vfp_base_type;
8883 int vfp_base_count;
8884
8885 if (arm_vfp_abi_for_function (gdbarch, func_type)
8886 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8887 {
8888 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8889 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8890 int i;
8891 for (i = 0; i < vfp_base_count; i++)
8892 {
8893 if (reg_char == 'q')
8894 {
8895 if (writebuf)
8896 arm_neon_quad_write (gdbarch, regcache, i,
8897 writebuf + i * unit_length);
8898
8899 if (readbuf)
8900 arm_neon_quad_read (gdbarch, regcache, i,
8901 readbuf + i * unit_length);
8902 }
8903 else
8904 {
8905 char name_buf[4];
8906 int regnum;
8907
8908 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8909 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8910 strlen (name_buf));
8911 if (writebuf)
8912 regcache->cooked_write (regnum, writebuf + i * unit_length);
8913 if (readbuf)
8914 regcache->cooked_read (regnum, readbuf + i * unit_length);
8915 }
8916 }
8917 return RETURN_VALUE_REGISTER_CONVENTION;
8918 }
8919
8920 if (valtype->code () == TYPE_CODE_STRUCT
8921 || valtype->code () == TYPE_CODE_UNION
8922 || valtype->code () == TYPE_CODE_ARRAY)
8923 {
8924 /* From the AAPCS document:
8925
8926 Result return:
8927
8928 A Composite Type larger than 4 bytes, or whose size cannot be
8929 determined statically by both caller and callee, is stored in memory
8930 at an address passed as an extra argument when the function was
8931 called (Parameter Passing, rule A.4). The memory to be used for the
8932 result may be modified at any point during the function call.
8933
8934 Parameter Passing:
8935
8936 A.4: If the subroutine is a function that returns a result in memory,
8937 then the address for the result is placed in r0 and the NCRN is set
8938 to r1. */
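      /* Illustrative example (hypothetical type, not part of the AAPCS text
	 quoted above): a composite such as

	   struct point { int x; int y; };   (8 bytes, larger than 4)

	 falls under rule A.4, so its result is written to caller-provided
	 memory whose address was passed in r0; the code below reads r0 to
	 locate that buffer when the in-memory convention applies.  */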
8939 if (tdep->struct_return == pcc_struct_return
8940 || arm_return_in_memory (gdbarch, valtype))
8941 {
8942 if (readbuf)
8943 {
8944 CORE_ADDR addr;
8945
8946 regcache->cooked_read (ARM_A1_REGNUM, &addr);
8947 read_memory (addr, readbuf, TYPE_LENGTH (valtype));
8948 }
8949 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
8950 }
8951 }
8952 else if (valtype->code () == TYPE_CODE_COMPLEX)
8953 {
8954 if (arm_return_in_memory (gdbarch, valtype))
8955 return RETURN_VALUE_STRUCT_CONVENTION;
8956 }
8957
8958 if (writebuf)
8959 arm_store_return_value (valtype, regcache, writebuf);
8960
8961 if (readbuf)
8962 arm_extract_return_value (valtype, regcache, readbuf);
8963
8964 return RETURN_VALUE_REGISTER_CONVENTION;
8965 }
8966
8967
8968 static int
8969 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8970 {
8971 struct gdbarch *gdbarch = get_frame_arch (frame);
8972 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8973 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8974 CORE_ADDR jb_addr;
8975 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8976
8977 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8978
8979 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8980 ARM_INT_REGISTER_SIZE))
8981 return 0;
8982
8983 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8984 return 1;
8985 }
8986 /* A call to cmse secure entry function "foo" at "a" is modified by
8987 GNU ld as "b".
8988 a) bl xxxx <foo>
8989
8990 <foo>
8991 xxxx:
8992
8993 b) bl yyyy <__acle_se_foo>
8994
8995 section .gnu.sgstubs:
8996 <foo>
8997 yyyy: sg // secure gateway
8998 b.w xxxx <__acle_se_foo> // original_branch_dest
8999
9000 <__acle_se_foo>
9001 xxxx:
9002
9003    When control is at "b", the pc contains "yyyy" (the sg address), which is a
9004    trampoline that does not exist in the source code.  This function returns the
9005 target pc "xxxx". For more details please refer to section 5.4
9006 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9007 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9008 document on www.developer.arm.com. */
9009
9010 static CORE_ADDR
9011 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9012 {
9013 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9014 char *target_name = (char *) alloca (target_len);
9015 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9016
9017 struct bound_minimal_symbol minsym
9018 = lookup_minimal_symbol (target_name, NULL, objfile);
9019
9020 if (minsym.minsym != nullptr)
9021 return minsym.value_address ();
9022
9023 return 0;
9024 }
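/* Worked example for the buffer sizing above (assuming an entry function
   literally named "foo"): strlen ("foo") is 3 and strlen ("__acle_se_") is
   10, so TARGET_LEN is 14, exactly enough for "__acle_se_foo" plus the
   terminating NUL written by xsnprintf.  */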
9025
9026 /* Return true when SEC points to the ".gnu.sgstubs" section. */
9027
9028 static bool
9029 arm_is_sgstubs_section (struct obj_section *sec)
9030 {
9031 return (sec != nullptr
9032 && sec->the_bfd_section != nullptr
9033 && sec->the_bfd_section->name != nullptr
9034 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9035 }
9036
9037 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9038 return the target PC. Otherwise return 0. */
9039
9040 CORE_ADDR
9041 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9042 {
9043 const char *name;
9044 int namelen;
9045 CORE_ADDR start_addr;
9046
9047 /* Find the starting address and name of the function containing the PC. */
9048 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9049 {
9050       /* Trampoline 'bx reg' doesn't belong to any function.  Do the
9051 check here. */
9052 start_addr = arm_skip_bx_reg (frame, pc);
9053 if (start_addr != 0)
9054 return start_addr;
9055
9056 return 0;
9057 }
9058
9059 /* If PC is in a Thumb call or return stub, return the address of the
9060 target PC, which is in a register. The thunk functions are called
9061 _call_via_xx, where x is the register name. The possible names
9062      _call_via_xx, where xx is the register name.  The possible names
9063 functions, named __ARM_call_via_r[0-7]. */
9064 if (startswith (name, "_call_via_")
9065 || startswith (name, "__ARM_call_via_"))
9066 {
9067 /* Use the name suffix to determine which register contains the
9068 target PC. */
9069 static const char *table[15] =
9070 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9071 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9072 };
9073 int regno;
9074 int offset = strlen (name) - 2;
9075
9076 for (regno = 0; regno <= 14; regno++)
9077 if (strcmp (&name[offset], table[regno]) == 0)
9078 return get_frame_register_unsigned (frame, regno);
9079 }
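  /* Worked example (hypothetical stub name): for a thunk called
     "_call_via_ip", OFFSET is strlen ("_call_via_ip") - 2 = 10, so
     &name[offset] is "ip"; that matches table[12], and the target PC is
     read from the ip register.  The "- 2" trick works because every entry
     in the table is exactly two characters long.  */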
9080
9081 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9082 non-interworking calls to foo. We could decode the stubs
9083 to find the target but it's easier to use the symbol table. */
9084 namelen = strlen (name);
9085 if (name[0] == '_' && name[1] == '_'
9086 && ((namelen > 2 + strlen ("_from_thumb")
9087 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9088 || (namelen > 2 + strlen ("_from_arm")
9089 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9090 {
9091 char *target_name;
9092 int target_len = namelen - 2;
9093 struct bound_minimal_symbol minsym;
9094 struct objfile *objfile;
9095 struct obj_section *sec;
9096
9097 if (name[namelen - 1] == 'b')
9098 target_len -= strlen ("_from_thumb");
9099 else
9100 target_len -= strlen ("_from_arm");
9101
9102 target_name = (char *) alloca (target_len + 1);
9103 memcpy (target_name, name + 2, target_len);
9104 target_name[target_len] = '\0';
9105
9106 sec = find_pc_section (pc);
9107 objfile = (sec == NULL) ? NULL : sec->objfile;
9108 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9109 if (minsym.minsym != NULL)
9110 return minsym.value_address ();
9111 else
9112 return 0;
9113 }
9114
9115 struct obj_section *section = find_pc_section (pc);
9116
9117 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9118 if (arm_is_sgstubs_section (section))
9119 return arm_skip_cmse_entry (pc, name, section->objfile);
9120
9121 return 0; /* not a stub */
9122 }
9123
9124 static void
9125 arm_update_current_architecture (void)
9126 {
9127 /* If the current architecture is not ARM, we have nothing to do. */
9128 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9129 return;
9130
9131 /* Update the architecture. */
9132 gdbarch_info info;
9133 if (!gdbarch_update_p (info))
9134 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9135 }
9136
9137 static void
9138 set_fp_model_sfunc (const char *args, int from_tty,
9139 struct cmd_list_element *c)
9140 {
9141 int fp_model;
9142
9143 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9144 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9145 {
9146 arm_fp_model = (enum arm_float_model) fp_model;
9147 break;
9148 }
9149
9150 if (fp_model == ARM_FLOAT_LAST)
9151 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9152 current_fp_model);
9153
9154 arm_update_current_architecture ();
9155 }
9156
9157 static void
9158 show_fp_model (struct ui_file *file, int from_tty,
9159 struct cmd_list_element *c, const char *value)
9160 {
9161 arm_gdbarch_tdep *tdep
9162 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
9163
9164 if (arm_fp_model == ARM_FLOAT_AUTO
9165 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9166 gdb_printf (file, _("\
9167 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9168 fp_model_strings[tdep->fp_model]);
9169 else
9170 gdb_printf (file, _("\
9171 The current ARM floating point model is \"%s\".\n"),
9172 fp_model_strings[arm_fp_model]);
9173 }
9174
9175 static void
9176 arm_set_abi (const char *args, int from_tty,
9177 struct cmd_list_element *c)
9178 {
9179 int arm_abi;
9180
9181 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9182 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9183 {
9184 arm_abi_global = (enum arm_abi_kind) arm_abi;
9185 break;
9186 }
9187
9188 if (arm_abi == ARM_ABI_LAST)
9189 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9190 arm_abi_string);
9191
9192 arm_update_current_architecture ();
9193 }
9194
9195 static void
9196 arm_show_abi (struct ui_file *file, int from_tty,
9197 struct cmd_list_element *c, const char *value)
9198 {
9199 arm_gdbarch_tdep *tdep
9200 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
9201
9202 if (arm_abi_global == ARM_ABI_AUTO
9203 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9204 gdb_printf (file, _("\
9205 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9206 arm_abi_strings[tdep->arm_abi]);
9207 else
9208 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9209 arm_abi_string);
9210 }
9211
9212 static void
9213 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9214 struct cmd_list_element *c, const char *value)
9215 {
9216 gdb_printf (file,
9217 _("The current execution mode assumed "
9218 "(when symbols are unavailable) is \"%s\".\n"),
9219 arm_fallback_mode_string);
9220 }
9221
9222 static void
9223 arm_show_force_mode (struct ui_file *file, int from_tty,
9224 struct cmd_list_element *c, const char *value)
9225 {
9226 gdb_printf (file,
9227 _("The current execution mode assumed "
9228 "(even when symbols are available) is \"%s\".\n"),
9229 arm_force_mode_string);
9230 }
9231
9232 static void
9233 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9234 struct cmd_list_element *c, const char *value)
9235 {
9236 gdb_printf (file,
9237 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9238 arm_unwind_secure_frames ? "on" : "off");
9239 }
9240
9241 /* If the user changes the register disassembly style used for info
9242 register and other commands, we have to also switch the style used
9243    in opcodes for disassembly output.  This function is run by the "set
9244    arm disassembler" command, and does that. */
9245
9246 static void
9247 set_disassembly_style_sfunc (const char *args, int from_tty,
9248 struct cmd_list_element *c)
9249 {
9250 /* Convert the short style name into the long style name (eg, reg-names-*)
9251 before calling the generic set_disassembler_options() function. */
9252 std::string long_name = std::string ("reg-names-") + disassembly_style;
9253 set_disassembler_options (&long_name[0]);
9254 }
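/* For example (using the default style registered below), "set arm
   disassembler std" leaves "std" in disassembly_style, and the function
   above hands "reg-names-std" to set_disassembler_options.  */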
9255
9256 static void
9257 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9258 struct cmd_list_element *c, const char *value)
9259 {
9260 struct gdbarch *gdbarch = get_current_arch ();
9261 char *options = get_disassembler_options (gdbarch);
9262 const char *style = "";
9263 int len = 0;
9264 const char *opt;
9265
9266 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9267 if (startswith (opt, "reg-names-"))
9268 {
9269 style = &opt[strlen ("reg-names-")];
9270 len = strcspn (style, ",");
9271 }
9272
9273 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9274 }
9275 \f
9276 /* Return the ARM register name corresponding to register I. */
9277 static const char *
9278 arm_register_name (struct gdbarch *gdbarch, int i)
9279 {
9280 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9281
9282 if (is_s_pseudo (gdbarch, i))
9283 {
9284 static const char *const s_pseudo_names[] = {
9285 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9286 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9287 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9288 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9289 };
9290
9291 return s_pseudo_names[i - tdep->s_pseudo_base];
9292 }
9293
9294 if (is_q_pseudo (gdbarch, i))
9295 {
9296 static const char *const q_pseudo_names[] = {
9297 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9298 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9299 };
9300
9301 return q_pseudo_names[i - tdep->q_pseudo_base];
9302 }
9303
9304 if (is_mve_pseudo (gdbarch, i))
9305 return "p0";
9306
9307 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9308 if (is_pacbti_pseudo (gdbarch, i))
9309 return "";
9310
9311 if (i >= ARRAY_SIZE (arm_register_names))
9312 /* These registers are only supported on targets which supply
9313 an XML description. */
9314 return "";
9315
9316 /* Non-pseudo registers. */
9317 return arm_register_names[i];
9318 }
9319
9320 /* Test whether the coff symbol specific value corresponds to a Thumb
9321 function. */
9322
9323 static int
9324 coff_sym_is_thumb (int val)
9325 {
9326 return (val == C_THUMBEXT
9327 || val == C_THUMBSTAT
9328 || val == C_THUMBEXTFUNC
9329 || val == C_THUMBSTATFUNC
9330 || val == C_THUMBLABEL);
9331 }
9332
9333 /* arm_coff_make_msymbol_special()
9334 arm_elf_make_msymbol_special()
9335
9336 These functions test whether the COFF or ELF symbol corresponds to
9337 an address in thumb code, and set a "special" bit in a minimal
9338 symbol to indicate that it does. */
9339
9340 static void
9341 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9342 {
9343 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9344
9345 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9346 == ST_BRANCH_TO_THUMB)
9347 MSYMBOL_SET_SPECIAL (msym);
9348 }
9349
9350 static void
9351 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9352 {
9353 if (coff_sym_is_thumb (val))
9354 MSYMBOL_SET_SPECIAL (msym);
9355 }
9356
9357 static void
9358 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9359 asymbol *sym)
9360 {
9361 const char *name = bfd_asymbol_name (sym);
9362 struct arm_per_bfd *data;
9363 struct arm_mapping_symbol new_map_sym;
9364
9365 gdb_assert (name[0] == '$');
9366 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9367 return;
9368
9369 data = arm_bfd_data_key.get (objfile->obfd);
9370 if (data == NULL)
9371 data = arm_bfd_data_key.emplace (objfile->obfd,
9372 objfile->obfd->section_count);
9373 arm_mapping_symbol_vec &map
9374 = data->section_maps[bfd_asymbol_section (sym)->index];
9375
9376 new_map_sym.value = sym->value;
9377 new_map_sym.type = name[1];
9378
9379 /* Insert at the end, the vector will be sorted on first use. */
9380 map.push_back (new_map_sym);
9381 }
9382
9383 static void
9384 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9385 {
9386 struct gdbarch *gdbarch = regcache->arch ();
9387 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9388
9389 /* If necessary, set the T bit. */
9390 if (arm_apcs_32)
9391 {
9392 ULONGEST val, t_bit;
9393 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9394 t_bit = arm_psr_thumb_bit (gdbarch);
9395 if (arm_pc_is_thumb (gdbarch, pc))
9396 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9397 val | t_bit);
9398 else
9399 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9400 val & ~t_bit);
9401 }
9402 }
9403
9404 /* Read the contents of a NEON quad register, by reading from two
9405 double registers. This is used to implement the quad pseudo
9406 registers, and for argument passing in case the quad registers are
9407 missing; vectors are passed in quad registers when using the VFP
9408 ABI, even if a NEON unit is not present. REGNUM is the index of
9409 the quad register, in [0, 15]. */
9410
9411 static enum register_status
9412 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9413 int regnum, gdb_byte *buf)
9414 {
9415 char name_buf[4];
9416 gdb_byte reg_buf[8];
9417 int offset, double_regnum;
9418 enum register_status status;
9419
9420 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9421 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9422 strlen (name_buf));
9423
9424 /* d0 is always the least significant half of q0. */
9425 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9426 offset = 8;
9427 else
9428 offset = 0;
9429
9430 status = regcache->raw_read (double_regnum, reg_buf);
9431 if (status != REG_VALID)
9432 return status;
9433 memcpy (buf + offset, reg_buf, 8);
9434
9435 offset = 8 - offset;
9436 status = regcache->raw_read (double_regnum + 1, reg_buf);
9437 if (status != REG_VALID)
9438 return status;
9439 memcpy (buf + offset, reg_buf, 8);
9440
9441 return REG_VALID;
9442 }
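/* Illustrative mapping for the function above: quad register q<N> is built
   from d<2N> (the least significant half) and d<2N+1>.  For q1, for
   example, the code reads d2 and d3; on a little-endian target d2 fills
   bytes 0-7 of BUF and d3 fills bytes 8-15, while on a big-endian target
   the two halves are swapped, as the OFFSET computation shows.  */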
9443
9444 /* Read the contents of the MVE pseudo register REGNUM and store it
9445 in BUF. */
9446
9447 static enum register_status
9448 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9449 int regnum, gdb_byte *buf)
9450 {
9451 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9452
9453 /* P0 is the first 16 bits of VPR. */
9454 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9455 }
9456
9457 static enum register_status
9458 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9459 int regnum, gdb_byte *buf)
9460 {
9461 const int num_regs = gdbarch_num_regs (gdbarch);
9462 char name_buf[4];
9463 gdb_byte reg_buf[8];
9464 int offset, double_regnum;
9465 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9466
9467 gdb_assert (regnum >= num_regs);
9468
9469 if (is_q_pseudo (gdbarch, regnum))
9470 {
9471 /* Quad-precision register. */
9472 return arm_neon_quad_read (gdbarch, regcache,
9473 regnum - tdep->q_pseudo_base, buf);
9474 }
9475 else if (is_mve_pseudo (gdbarch, regnum))
9476 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9477 else
9478 {
9479 enum register_status status;
9480
9481 regnum -= tdep->s_pseudo_base;
9482 /* Single-precision register. */
9483 gdb_assert (regnum < 32);
9484
9485 /* s0 is always the least significant half of d0. */
9486 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9487 offset = (regnum & 1) ? 0 : 4;
9488 else
9489 offset = (regnum & 1) ? 4 : 0;
9490
9491 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9492 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9493 strlen (name_buf));
9494
9495 status = regcache->raw_read (double_regnum, reg_buf);
9496 if (status == REG_VALID)
9497 memcpy (buf, reg_buf + offset, 4);
9498 return status;
9499 }
9500 }
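/* Illustrative mapping for the single-precision branch above: pseudo
   register s<N> overlays double register d<N/2>.  For s5, for example, the
   code reads d2 and copies 4 bytes from offset 4 on a little-endian target
   (s5 is the more significant half of d2), or from offset 0 on a
   big-endian target.  */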
9501
9502 /* Store the contents of BUF to a NEON quad register, by writing to
9503 two double registers. This is used to implement the quad pseudo
9504 registers, and for argument passing in case the quad registers are
9505 missing; vectors are passed in quad registers when using the VFP
9506 ABI, even if a NEON unit is not present. REGNUM is the index
9507 of the quad register, in [0, 15]. */
9508
9509 static void
9510 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9511 int regnum, const gdb_byte *buf)
9512 {
9513 char name_buf[4];
9514 int offset, double_regnum;
9515
9516 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9517 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9518 strlen (name_buf));
9519
9520 /* d0 is always the least significant half of q0. */
9521 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9522 offset = 8;
9523 else
9524 offset = 0;
9525
9526 regcache->raw_write (double_regnum, buf + offset);
9527 offset = 8 - offset;
9528 regcache->raw_write (double_regnum + 1, buf + offset);
9529 }
9530
9531 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9532
9533 static void
9534 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9535 int regnum, const gdb_byte *buf)
9536 {
9537 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9538
9539 /* P0 is the first 16 bits of VPR. */
9540 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9541 }
9542
9543 static void
9544 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9545 int regnum, const gdb_byte *buf)
9546 {
9547 const int num_regs = gdbarch_num_regs (gdbarch);
9548 char name_buf[4];
9549 gdb_byte reg_buf[8];
9550 int offset, double_regnum;
9551 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9552
9553 gdb_assert (regnum >= num_regs);
9554
9555 if (is_q_pseudo (gdbarch, regnum))
9556 {
9557 /* Quad-precision register. */
9558 arm_neon_quad_write (gdbarch, regcache,
9559 regnum - tdep->q_pseudo_base, buf);
9560 }
9561 else if (is_mve_pseudo (gdbarch, regnum))
9562 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9563 else
9564 {
9565 regnum -= tdep->s_pseudo_base;
9566 /* Single-precision register. */
9567 gdb_assert (regnum < 32);
9568
9569 /* s0 is always the least significant half of d0. */
9570 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9571 offset = (regnum & 1) ? 0 : 4;
9572 else
9573 offset = (regnum & 1) ? 4 : 0;
9574
9575 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9576 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9577 strlen (name_buf));
9578
9579 regcache->raw_read (double_regnum, reg_buf);
9580 memcpy (reg_buf + offset, buf, 4);
9581 regcache->raw_write (double_regnum, reg_buf);
9582 }
9583 }
9584
9585 static struct value *
9586 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9587 {
9588 const int *reg_p = (const int *) baton;
9589 return value_of_register (*reg_p, frame);
9590 }
9591 \f
9592 static enum gdb_osabi
9593 arm_elf_osabi_sniffer (bfd *abfd)
9594 {
9595 unsigned int elfosabi;
9596 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9597
9598 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9599
9600 if (elfosabi == ELFOSABI_ARM)
9601 /* GNU tools use this value. Check note sections in this case,
9602 as well. */
9603 {
9604 for (asection *sect : gdb_bfd_sections (abfd))
9605 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9606 }
9607
9608 /* Anything else will be handled by the generic ELF sniffer. */
9609 return osabi;
9610 }
9611
9612 static int
9613 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9614 const struct reggroup *group)
9615 {
9616   /* The FPS register's type is INT, but it belongs to float_reggroup.  Besides
9617      this, the FPS register belongs to save_reggroup, restore_reggroup, and
9618      all_reggroup, of course. */
9619 if (regnum == ARM_FPS_REGNUM)
9620 return (group == float_reggroup
9621 || group == save_reggroup
9622 || group == restore_reggroup
9623 || group == all_reggroup);
9624 else
9625 return default_register_reggroup_p (gdbarch, regnum, group);
9626 }
9627
9628 /* For backward-compatibility we allow two 'g' packet lengths with
9629 the remote protocol depending on whether FPA registers are
9630 supplied. M-profile targets do not have FPA registers, but some
9631 stubs already exist in the wild which use a 'g' packet which
9632 supplies them albeit with dummy values. The packet format which
9633 includes FPA registers should be considered deprecated for
9634 M-profile targets. */
9635
9636 static void
9637 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9638 {
9639 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9640
9641 if (tdep->is_m)
9642 {
9643 const target_desc *tdesc;
9644
9645 /* If we know from the executable this is an M-profile target,
9646 cater for remote targets whose register set layout is the
9647 same as the FPA layout. */
9648 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9649 register_remote_g_packet_guess (gdbarch,
9650 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9651 tdesc);
9652
9653 /* The regular M-profile layout. */
9654 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9655 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9656 tdesc);
9657
9658 /* M-profile plus M4F VFP. */
9659 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9660 register_remote_g_packet_guess (gdbarch,
9661 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9662 tdesc);
9663 /* M-profile plus MVE. */
9664 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9665 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9666 + ARM_VFP2_REGS_SIZE
9667 + ARM_INT_REGISTER_SIZE, tdesc);
9668
9669 /* M-profile system (stack pointers). */
9670 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
9671 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
9672 }
9673
9674 /* Otherwise we don't have a useful guess. */
9675 }
9676
9677 /* Implement the code_of_frame_writable gdbarch method. */
9678
9679 static int
9680 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
9681 {
9682 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9683
9684 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9685 {
9686       /* M-profile exception frames return to some magic PCs, which
9687 	 aren't writable at all. */
9688 return 0;
9689 }
9690 else
9691 return 1;
9692 }
9693
9694 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
9695    to be followed by a version suffix (e.g. armv7hl). */
9696
9697 static const char *
9698 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9699 {
9700 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9701 return "arm(v[^- ]*)?";
9702 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9703 }
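/* Note on the pattern above: "arm(v[^- ]*)?" accepts the bare architecture
   name "arm" as well as versioned forms such as "armv7hl"; excluding '-'
   and ' ' from the optional group keeps the match from spilling into the
   vendor/OS fields of a GNU triplet.  */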
9704
9705 /* Implement the "get_pc_address_flags" gdbarch method. */
9706
9707 static std::string
9708 arm_get_pc_address_flags (frame_info *frame, CORE_ADDR pc)
9709 {
9710 if (get_frame_pc_masked (frame))
9711 return "PAC";
9712
9713 return "";
9714 }
9715
9716 /* Initialize the current architecture based on INFO. If possible,
9717 re-use an architecture from ARCHES, which is a list of
9718 architectures already created during this debugging session.
9719
9720 Called e.g. at program startup, when reading a core file, and when
9721 reading a binary file. */
9722
9723 static struct gdbarch *
9724 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9725 {
9726 struct gdbarch *gdbarch;
9727 struct gdbarch_list *best_arch;
9728 enum arm_abi_kind arm_abi = arm_abi_global;
9729 enum arm_float_model fp_model = arm_fp_model;
9730 tdesc_arch_data_up tdesc_data;
9731 int i;
9732 bool is_m = false;
9733 bool have_sec_ext = false;
9734 int vfp_register_count = 0;
9735 bool have_s_pseudos = false, have_q_pseudos = false;
9736 bool have_wmmx_registers = false;
9737 bool have_neon = false;
9738 bool have_fpa_registers = true;
9739 const struct target_desc *tdesc = info.target_desc;
9740 bool have_vfp = false;
9741 bool have_mve = false;
9742 bool have_pacbti = false;
9743 int mve_vpr_regnum = -1;
9744 int register_count = ARM_NUM_REGS;
9745 bool have_m_profile_msp = false;
9746 int m_profile_msp_regnum = -1;
9747 int m_profile_psp_regnum = -1;
9748 int m_profile_msp_ns_regnum = -1;
9749 int m_profile_psp_ns_regnum = -1;
9750 int m_profile_msp_s_regnum = -1;
9751 int m_profile_psp_s_regnum = -1;
9752 int tls_regnum = 0;
9753
9754 /* If we have an object to base this architecture on, try to determine
9755 its ABI. */
9756
9757 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9758 {
9759 int ei_osabi, e_flags;
9760
9761 switch (bfd_get_flavour (info.abfd))
9762 {
9763 case bfd_target_coff_flavour:
9764 /* Assume it's an old APCS-style ABI. */
9765 /* XXX WinCE? */
9766 arm_abi = ARM_ABI_APCS;
9767 break;
9768
9769 case bfd_target_elf_flavour:
9770 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9771 e_flags = elf_elfheader (info.abfd)->e_flags;
9772
9773 if (ei_osabi == ELFOSABI_ARM)
9774 {
9775 /* GNU tools used to use this value, but do not for EABI
9776 objects. There's nowhere to tag an EABI version
9777 anyway, so assume APCS. */
9778 arm_abi = ARM_ABI_APCS;
9779 }
9780 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9781 {
9782 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9783
9784 switch (eabi_ver)
9785 {
9786 case EF_ARM_EABI_UNKNOWN:
9787 /* Assume GNU tools. */
9788 arm_abi = ARM_ABI_APCS;
9789 break;
9790
9791 case EF_ARM_EABI_VER4:
9792 case EF_ARM_EABI_VER5:
9793 arm_abi = ARM_ABI_AAPCS;
9794 /* EABI binaries default to VFP float ordering.
9795 They may also contain build attributes that can
9796 be used to identify if the VFP argument-passing
9797 ABI is in use. */
9798 if (fp_model == ARM_FLOAT_AUTO)
9799 {
9800 #ifdef HAVE_ELF
9801 switch (bfd_elf_get_obj_attr_int (info.abfd,
9802 OBJ_ATTR_PROC,
9803 Tag_ABI_VFP_args))
9804 {
9805 case AEABI_VFP_args_base:
9806 /* "The user intended FP parameter/result
9807 passing to conform to AAPCS, base
9808 variant". */
9809 fp_model = ARM_FLOAT_SOFT_VFP;
9810 break;
9811 case AEABI_VFP_args_vfp:
9812 /* "The user intended FP parameter/result
9813 passing to conform to AAPCS, VFP
9814 variant". */
9815 fp_model = ARM_FLOAT_VFP;
9816 break;
9817 case AEABI_VFP_args_toolchain:
9818 /* "The user intended FP parameter/result
9819 passing to conform to tool chain-specific
9820 conventions" - we don't know any such
9821 conventions, so leave it as "auto". */
9822 break;
9823 case AEABI_VFP_args_compatible:
9824 /* "Code is compatible with both the base
9825 and VFP variants; the user did not permit
9826 non-variadic functions to pass FP
9827 parameters/results" - leave it as
9828 "auto". */
9829 break;
9830 default:
9831 /* Attribute value not mentioned in the
9832 November 2012 ABI, so leave it as
9833 "auto". */
9834 break;
9835 }
9836 #else
9837 fp_model = ARM_FLOAT_SOFT_VFP;
9838 #endif
9839 }
9840 break;
9841
9842 default:
9843 /* Leave it as "auto". */
9844 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9845 break;
9846 }
9847
9848 #ifdef HAVE_ELF
9849 /* Detect M-profile programs. This only works if the
9850 executable file includes build attributes; GCC does
9851 copy them to the executable, but e.g. RealView does
9852 not. */
9853 int attr_arch
9854 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9855 Tag_CPU_arch);
9856 int attr_profile
9857 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9858 Tag_CPU_arch_profile);
9859
9860 /* GCC specifies the profile for v6-M; RealView only
9861 specifies the profile for architectures starting with
9862 V7 (as opposed to architectures with a tag
9863 numerically greater than TAG_CPU_ARCH_V7). */
9864 if (!tdesc_has_registers (tdesc)
9865 && (attr_arch == TAG_CPU_ARCH_V6_M
9866 || attr_arch == TAG_CPU_ARCH_V6S_M
9867 || attr_arch == TAG_CPU_ARCH_V7E_M
9868 || attr_arch == TAG_CPU_ARCH_V8M_BASE
9869 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
9870 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
9871 || attr_profile == 'M'))
9872 is_m = true;
9873
9874 /* Look for attributes that indicate support for ARMv8.1-m
9875 PACBTI. */
9876 if (!tdesc_has_registers (tdesc) && is_m)
9877 {
9878 int attr_pac_extension
9879 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9880 Tag_PAC_extension);
9881
9882 int attr_bti_extension
9883 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9884 Tag_BTI_extension);
9885
9886 int attr_pacret_use
9887 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9888 Tag_PACRET_use);
9889
9890 int attr_bti_use
9891 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9892 Tag_BTI_use);
9893
9894 if (attr_pac_extension != 0 || attr_bti_extension != 0
9895 || attr_pacret_use != 0 || attr_bti_use != 0)
9896 have_pacbti = true;
9897 }
9898 #endif
9899 }
9900
9901 if (fp_model == ARM_FLOAT_AUTO)
9902 {
9903 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9904 {
9905 case 0:
9906 /* Leave it as "auto". Strictly speaking this case
9907 means FPA, but almost nobody uses that now, and
9908 many toolchains fail to set the appropriate bits
9909 for the floating-point model they use. */
9910 break;
9911 case EF_ARM_SOFT_FLOAT:
9912 fp_model = ARM_FLOAT_SOFT_FPA;
9913 break;
9914 case EF_ARM_VFP_FLOAT:
9915 fp_model = ARM_FLOAT_VFP;
9916 break;
9917 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9918 fp_model = ARM_FLOAT_SOFT_VFP;
9919 break;
9920 }
9921 }
9922
9923 if (e_flags & EF_ARM_BE8)
9924 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9925
9926 break;
9927
9928 default:
9929 /* Leave it as "auto". */
9930 break;
9931 }
9932 }
9933
9934 /* Check any target description for validity. */
9935 if (tdesc_has_registers (tdesc))
9936 {
9937 /* For most registers we require GDB's default names; but also allow
9938 the numeric names for sp / lr / pc, as a convenience. */
9939 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9940 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9941 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9942
9943 const struct tdesc_feature *feature;
9944 int valid_p;
9945
9946 feature = tdesc_find_feature (tdesc,
9947 "org.gnu.gdb.arm.core");
9948 if (feature == NULL)
9949 {
9950 feature = tdesc_find_feature (tdesc,
9951 "org.gnu.gdb.arm.m-profile");
9952 if (feature == NULL)
9953 return NULL;
9954 else
9955 is_m = true;
9956 }
9957
9958 tdesc_data = tdesc_data_alloc ();
9959
9960 valid_p = 1;
9961 for (i = 0; i < ARM_SP_REGNUM; i++)
9962 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9963 arm_register_names[i]);
9964 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9965 ARM_SP_REGNUM,
9966 arm_sp_names);
9967 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9968 ARM_LR_REGNUM,
9969 arm_lr_names);
9970 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9971 ARM_PC_REGNUM,
9972 arm_pc_names);
9973 if (is_m)
9974 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9975 ARM_PS_REGNUM, "xpsr");
9976 else
9977 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9978 ARM_PS_REGNUM, "cpsr");
9979
9980 if (!valid_p)
9981 return NULL;
9982
9983 if (is_m)
9984 {
9985 feature = tdesc_find_feature (tdesc,
9986 "org.gnu.gdb.arm.m-system");
9987 if (feature != nullptr)
9988 {
9989 /* MSP */
9990 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9991 register_count, "msp");
9992 if (!valid_p)
9993 {
9994 warning (_("M-profile m-system feature is missing required register msp."));
9995 return nullptr;
9996 }
9997 have_m_profile_msp = true;
9998 m_profile_msp_regnum = register_count++;
9999
10000 /* PSP */
10001 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10002 register_count, "psp");
10003 if (!valid_p)
10004 {
10005 warning (_("M-profile m-system feature is missing required register psp."));
10006 return nullptr;
10007 }
10008 m_profile_psp_regnum = register_count++;
10009 }
10010 }
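      /* A minimal sketch of a target description fragment that satisfies
	 the m-system check above (illustrative only; a real stub chooses
	 its own bit sizes and register numbers):

	   <feature name="org.gnu.gdb.arm.m-system">
	     <reg name="msp" bitsize="32"/>
	     <reg name="psp" bitsize="32"/>
	   </feature>  */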
10011
10012 feature = tdesc_find_feature (tdesc,
10013 "org.gnu.gdb.arm.fpa");
10014 if (feature != NULL)
10015 {
10016 valid_p = 1;
10017 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10018 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10019 arm_register_names[i]);
10020 if (!valid_p)
10021 return NULL;
10022 }
10023 else
10024 have_fpa_registers = false;
10025
10026 feature = tdesc_find_feature (tdesc,
10027 "org.gnu.gdb.xscale.iwmmxt");
10028 if (feature != NULL)
10029 {
10030 static const char *const iwmmxt_names[] = {
10031 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10032 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10033 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10034 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10035 };
10036
10037 valid_p = 1;
10038 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10039 valid_p
10040 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10041 iwmmxt_names[i - ARM_WR0_REGNUM]);
10042
10043 /* Check for the control registers, but do not fail if they
10044 are missing. */
10045 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10046 tdesc_numbered_register (feature, tdesc_data.get (), i,
10047 iwmmxt_names[i - ARM_WR0_REGNUM]);
10048
10049 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10050 valid_p
10051 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10052 iwmmxt_names[i - ARM_WR0_REGNUM]);
10053
10054 if (!valid_p)
10055 return NULL;
10056
10057 have_wmmx_registers = true;
10058 }
10059
10060 /* If we have a VFP unit, check whether the single precision registers
10061 are present. If not, then we will synthesize them as pseudo
10062 registers. */
10063 feature = tdesc_find_feature (tdesc,
10064 "org.gnu.gdb.arm.vfp");
10065 if (feature != NULL)
10066 {
10067 static const char *const vfp_double_names[] = {
10068 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10069 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10070 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10071 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10072 };
10073
10074 /* Require the double precision registers. There must be either
10075 16 or 32. */
10076 valid_p = 1;
10077 for (i = 0; i < 32; i++)
10078 {
10079 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10080 ARM_D0_REGNUM + i,
10081 vfp_double_names[i]);
10082 if (!valid_p)
10083 break;
10084 }
10085 if (!valid_p && i == 16)
10086 valid_p = 1;
10087
10088 /* Also require FPSCR. */
10089 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10090 ARM_FPSCR_REGNUM, "fpscr");
10091 if (!valid_p)
10092 return NULL;
10093
10094 have_vfp = true;
10095
10096 if (tdesc_unnumbered_register (feature, "s0") == 0)
10097 have_s_pseudos = true;
10098
10099 vfp_register_count = i;
10100
10101 /* If we have VFP, also check for NEON. The architecture allows
10102 NEON without VFP (integer vector operations only), but GDB
10103 does not support that. */
10104 feature = tdesc_find_feature (tdesc,
10105 "org.gnu.gdb.arm.neon");
10106 if (feature != NULL)
10107 {
10108 /* NEON requires 32 double-precision registers. */
10109 if (i != 32)
10110 return NULL;
10111
10112 /* If there are quad registers defined by the stub, use
10113 their type; otherwise (normally) provide them with
10114 the default type. */
10115 if (tdesc_unnumbered_register (feature, "q0") == 0)
10116 have_q_pseudos = true;
10117 }
10118 }
10119
10120 /* Check for the TLS register feature. */
10121 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10122 if (feature != nullptr)
10123 {
10124 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10125 register_count, "tpidruro");
10126 if (!valid_p)
10127 return nullptr;
10128
10129 tls_regnum = register_count;
10130 register_count++;
10131 }
10132
10133       /* Check for MVE after all the checks for GPRs, VFP and NEON.
10134 MVE (Helium) is an M-profile extension. */
10135 if (is_m)
10136 {
10137 /* Do we have the MVE feature? */
10138 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
10139
10140 if (feature != nullptr)
10141 {
10142 /* If we have MVE, we must always have the VPR register. */
10143 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10144 register_count, "vpr");
10145 if (!valid_p)
10146 {
10147 warning (_("MVE feature is missing required register vpr."));
10148 return nullptr;
10149 }
10150
10151 have_mve = true;
10152 mve_vpr_regnum = register_count;
10153 register_count++;
10154
10155 /* We can't have Q pseudo registers available here, as that
10156 would mean we have NEON features, and that is only available
10157 on A and R profiles. */
10158 gdb_assert (!have_q_pseudos);
10159
10160 	      /* Given we have an M-profile target description, if MVE is
10161 enabled and there are VFP registers, we should have Q
10162 pseudo registers (Q0 ~ Q7). */
10163 if (have_vfp)
10164 have_q_pseudos = true;
10165 }
10166
10167 /* Do we have the ARMv8.1-m PACBTI feature? */
10168 feature = tdesc_find_feature (tdesc,
10169 "org.gnu.gdb.arm.m-profile-pacbti");
10170 if (feature != nullptr)
10171 {
10172 /* By advertising this feature, the target acknowledges the
10173 presence of the ARMv8.1-m PACBTI extensions.
10174
10175 	     We don't care about any particular registers in this group, so
10176 the target is free to include whatever it deems appropriate.
10177
10178 The expectation is for this feature to include the PAC
10179 keys. */
10180 have_pacbti = true;
10181 }
10182
10183 /* Do we have the Security extension? */
10184 feature = tdesc_find_feature (tdesc,
10185 "org.gnu.gdb.arm.secext");
10186 if (feature != nullptr)
10187 {
10188 /* Secure/Non-secure stack pointers. */
10189 /* MSP_NS */
10190 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10191 register_count, "msp_ns");
10192 if (!valid_p)
10193 {
10194 warning (_("M-profile secext feature is missing required register msp_ns."));
10195 return nullptr;
10196 }
10197 m_profile_msp_ns_regnum = register_count++;
10198
10199 /* PSP_NS */
10200 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10201 register_count, "psp_ns");
10202 if (!valid_p)
10203 {
10204 warning (_("M-profile secext feature is missing required register psp_ns."));
10205 return nullptr;
10206 }
10207 m_profile_psp_ns_regnum = register_count++;
10208
10209 /* MSP_S */
10210 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10211 register_count, "msp_s");
10212 if (!valid_p)
10213 {
10214 warning (_("M-profile secext feature is missing required register msp_s."));
10215 return nullptr;
10216 }
10217 m_profile_msp_s_regnum = register_count++;
10218
10219 /* PSP_S */
10220 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10221 register_count, "psp_s");
10222 if (!valid_p)
10223 {
10224 warning (_("M-profile secext feature is missing required register psp_s."));
10225 return nullptr;
10226 }
10227 m_profile_psp_s_regnum = register_count++;
10228
10229 have_sec_ext = true;
10230 }
10231
10232 }
10233 }
10234
10235 /* If there is already a candidate, use it. */
10236 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10237 best_arch != NULL;
10238 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10239 {
10240 arm_gdbarch_tdep *tdep
10241 = (arm_gdbarch_tdep *) gdbarch_tdep (best_arch->gdbarch);
10242
10243 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10244 continue;
10245
10246 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10247 continue;
10248
10249 /* There are various other properties in tdep that we do not
10250 need to check here: those derived from a target description,
10251 since gdbarches with a different target description are
10252 automatically disqualified. */
10253
10254 /* Do check is_m, though, since it might come from the binary. */
10255 if (is_m != tdep->is_m)
10256 continue;
10257
10258 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10259 the binary. */
10260 if (have_pacbti != tdep->have_pacbti)
10261 continue;
10262
10263 /* Found a match. */
10264 break;
10265 }
10266
10267 if (best_arch != NULL)
10268 return best_arch->gdbarch;
10269
10270 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
10271 gdbarch = gdbarch_alloc (&info, tdep);
10272
10273 /* Record additional information about the architecture we are defining.
10274 These are gdbarch discriminators, like the OSABI. */
10275 tdep->arm_abi = arm_abi;
10276 tdep->fp_model = fp_model;
10277 tdep->is_m = is_m;
10278 tdep->have_sec_ext = have_sec_ext;
10279 tdep->have_fpa_registers = have_fpa_registers;
10280 tdep->have_wmmx_registers = have_wmmx_registers;
10281 gdb_assert (vfp_register_count == 0
10282 || vfp_register_count == 16
10283 || vfp_register_count == 32);
10284 tdep->vfp_register_count = vfp_register_count;
10285 tdep->have_s_pseudos = have_s_pseudos;
10286 tdep->have_q_pseudos = have_q_pseudos;
10287 tdep->have_neon = have_neon;
10288 tdep->tls_regnum = tls_regnum;
10289
10290 /* Adjust the MVE feature settings. */
10291 if (have_mve)
10292 {
10293 tdep->have_mve = true;
10294 tdep->mve_vpr_regnum = mve_vpr_regnum;
10295 }
10296
10297 /* Adjust the PACBTI feature settings. */
10298 tdep->have_pacbti = have_pacbti;
10299
10300 /* Adjust the M-profile stack pointers settings. */
10301 if (have_m_profile_msp)
10302 {
10303 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10304 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10305 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10306 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10307 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10308 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10309 }
10310
10311 arm_register_g_packet_guesses (gdbarch);
10312
10313 /* Breakpoints. */
10314 switch (info.byte_order_for_code)
10315 {
10316 case BFD_ENDIAN_BIG:
10317 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10318 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10319 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10320 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10321
10322 break;
10323
10324 case BFD_ENDIAN_LITTLE:
10325 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10326 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10327 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10328 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10329
10330 break;
10331
10332 default:
10333 internal_error (__FILE__, __LINE__,
10334 _("arm_gdbarch_init: bad byte order for float format"));
10335 }
10336
10337 /* On ARM targets char defaults to unsigned. */
10338 set_gdbarch_char_signed (gdbarch, 0);
10339
10340 /* wchar_t is unsigned under the AAPCS. */
10341 if (tdep->arm_abi == ARM_ABI_AAPCS)
10342 set_gdbarch_wchar_signed (gdbarch, 0);
10343 else
10344 set_gdbarch_wchar_signed (gdbarch, 1);
10345
10346 /* Compute type alignment. */
10347 set_gdbarch_type_align (gdbarch, arm_type_align);
10348
10349 /* Note: for displaced stepping, this includes the breakpoint, and one word
10350    of additional scratch space.  This setting isn't used for anything besides
10351 displaced stepping at present. */
10352 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10353
10354 /* This should be low enough for everything. */
10355 tdep->lowest_pc = 0x20;
10356 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10357
10358 /* The default, for both APCS and AAPCS, is to return small
10359 structures in registers. */
10360 tdep->struct_return = reg_struct_return;
10361
10362 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10363 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10364
10365 if (is_m)
10366 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10367
10368 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10369
10370 frame_base_set_default (gdbarch, &arm_normal_base);
10371
10372 /* Address manipulation. */
10373 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10374
10375 /* Advance PC across function entry code. */
10376 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10377
10378 /* Detect whether PC is at a point where the stack has been destroyed. */
10379 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10380
10381 /* Skip trampolines. */
10382 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10383
10384 /* The stack grows downward. */
10385 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10386
10387 /* Breakpoint manipulation. */
10388 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10389 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10390 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10391 arm_breakpoint_kind_from_current_state);
10392
10393 /* Information about registers, etc. */
10394 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10395 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10396 set_gdbarch_num_regs (gdbarch, register_count);
10397 set_gdbarch_register_type (gdbarch, arm_register_type);
10398 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10399
10400 /* This "info float" is FPA-specific. Use the generic version if we
10401 do not have FPA. */
10402 if (tdep->have_fpa_registers)
10403 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10404
10405 /* Internal <-> external register number maps. */
10406 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10407 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10408
10409 set_gdbarch_register_name (gdbarch, arm_register_name);
10410
10411 /* Returning results. */
10412 set_gdbarch_return_value (gdbarch, arm_return_value);
10413
10414 /* Disassembly. */
10415 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10416
10417 /* Minsymbol frobbing. */
10418 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10419 set_gdbarch_coff_make_msymbol_special (gdbarch,
10420 arm_coff_make_msymbol_special);
10421 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10422
10423 /* Thumb-2 IT block support. */
10424 set_gdbarch_adjust_breakpoint_address (gdbarch,
10425 arm_adjust_breakpoint_address);
10426
10427 /* Virtual tables. */
10428 set_gdbarch_vbit_in_delta (gdbarch, 1);
10429
10430 /* Hook in the ABI-specific overrides, if they have been registered. */
10431 gdbarch_init_osabi (info, gdbarch);
10432
10433 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10434
10435 /* Add some default predicates. */
10436 if (is_m)
10437 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10438 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10439 dwarf2_append_unwinders (gdbarch);
10440 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10441 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10442 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10443
10444 /* Now we have tuned the configuration, set a few final things,
10445 based on what the OS ABI has told us. */
10446
10447 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10448 binaries are always marked. */
10449 if (tdep->arm_abi == ARM_ABI_AUTO)
10450 tdep->arm_abi = ARM_ABI_APCS;
10451
10452 /* Watchpoints are not steppable. */
10453 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10454
10455 /* We used to default to FPA for generic ARM, but almost nobody
10456 uses that now, and we now provide a way for the user to force
10457 the model. So default to the most useful variant. */
10458 if (tdep->fp_model == ARM_FLOAT_AUTO)
10459 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10460
10461 if (tdep->jb_pc >= 0)
10462 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10463
10464 /* Floating point sizes and format. */
10465 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10466 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10467 {
10468 set_gdbarch_double_format
10469 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10470 set_gdbarch_long_double_format
10471 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10472 }
10473 else
10474 {
10475 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10476 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10477 }
10478
10479 /* Hook used to decorate frames with signed return addresses, only available
10480 for ARMv8.1-m PACBTI. */
10481 if (is_m && have_pacbti)
10482 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10483
10484 if (tdesc_data != nullptr)
10485 {
10486 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10487
10488 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10489 register_count = gdbarch_num_regs (gdbarch);
10490
10491 /* Override tdesc_register_type to adjust the types of VFP
10492 registers for NEON. */
10493 set_gdbarch_register_type (gdbarch, arm_register_type);
10494 }
10495
10496 /* Initialize the pseudo register data. */
10497 int num_pseudos = 0;
10498 if (tdep->have_s_pseudos)
10499 {
10500 /* VFP single precision pseudo registers (S0~S31). */
10501 tdep->s_pseudo_base = register_count;
10502 tdep->s_pseudo_count = 32;
10503 num_pseudos += tdep->s_pseudo_count;
10504
10505 if (tdep->have_q_pseudos)
10506 {
10507 /* NEON quad precision pseudo registers (Q0~Q15). */
10508 tdep->q_pseudo_base = register_count + num_pseudos;
10509
10510 if (have_neon)
10511 tdep->q_pseudo_count = 16;
10512 else if (have_mve)
10513 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10514
10515 num_pseudos += tdep->q_pseudo_count;
10516 }
10517 }
10518
10519 /* Do we have any MVE pseudo registers? */
10520 if (have_mve)
10521 {
10522 tdep->mve_pseudo_base = register_count + num_pseudos;
10523 tdep->mve_pseudo_count = 1;
10524 num_pseudos += tdep->mve_pseudo_count;
10525 }
10526
10527   /* Do we have any ARMv8.1-m PACBTI pseudo registers? */
10528 if (have_pacbti)
10529 {
10530 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10531 tdep->pacbti_pseudo_count = 1;
10532 num_pseudos += tdep->pacbti_pseudo_count;
10533 }
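  /* Illustrative pseudo register layout (the actual numbers depend on the
     target description): with R raw registers reported by gdbarch_num_regs,

       s0 .. s31         -> R      .. R + 31   (if have_s_pseudos)
       q0 .. q15 or q7   -> R + 32 onwards     (if have_q_pseudos; 16 for
						NEON, 8 for MVE)
       p0                -> next number        (if have_mve)
       PACBTI pseudo     -> next number        (if have_pacbti; holds
						RA_AUTH_CODE, unnamed)  */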
10534
10535 /* Set some pseudo register hooks, if we have pseudo registers. */
10536 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10537 {
10538 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10539 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10540 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10541 }
10542
10543 /* Add standard register aliases. We add aliases even for those
10544 names which are used by the current architecture - it's simpler,
10545 and does no harm, since nothing ever lists user registers. */
10546 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10547 user_reg_add (gdbarch, arm_register_aliases[i].name,
10548 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10549
10550 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10551 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10552
10553 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10554
10555 return gdbarch;
10556 }
10557
10558 static void
10559 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10560 {
10561 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
10562
10563 if (tdep == NULL)
10564 return;
10565
10566 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10567 (int) tdep->fp_model);
10568 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10569 (int) tdep->have_fpa_registers);
10570 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10571 (int) tdep->have_wmmx_registers);
10572 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10573 (int) tdep->vfp_register_count);
10574 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10575 tdep->have_s_pseudos? "true" : "false");
10576 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10577 (int) tdep->s_pseudo_base);
10578 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10579 (int) tdep->s_pseudo_count);
10580 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10581 tdep->have_q_pseudos? "true" : "false");
10582 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10583 (int) tdep->q_pseudo_base);
10584 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10585 (int) tdep->q_pseudo_count);
10586 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10587 (int) tdep->have_neon);
10588 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10589 tdep->have_mve? "yes" : "no");
10590 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10591 tdep->mve_vpr_regnum);
10592 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10593 tdep->mve_pseudo_base);
10594 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10595 tdep->mve_pseudo_count);
10596 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10597 tdep->m_profile_msp_regnum);
10598 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10599 tdep->m_profile_psp_regnum);
10600 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10601 tdep->m_profile_msp_ns_regnum);
10602 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10603 tdep->m_profile_psp_ns_regnum);
10604 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10605 tdep->m_profile_msp_s_regnum);
10606 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10607 tdep->m_profile_psp_s_regnum);
10608 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10609 (unsigned long) tdep->lowest_pc);
10610 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10611 tdep->have_pacbti? "yes" : "no");
10612 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10613 tdep->pacbti_pseudo_base);
10614 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10615 tdep->pacbti_pseudo_count);
10616 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10617 tdep->is_m? "yes" : "no");
10618 }
10619
10620 #if GDB_SELF_TEST
10621 namespace selftests
10622 {
10623 static void arm_record_test (void);
10624 static void arm_analyze_prologue_test ();
10625 }
10626 #endif
10627
10628 void _initialize_arm_tdep ();
10629 void
10630 _initialize_arm_tdep ()
10631 {
10632 long length;
10633 int i, j;
10634 char regdesc[1024], *rdptr = regdesc;
10635 size_t rest = sizeof (regdesc);
10636
10637 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10638
10639 /* Add ourselves to objfile event chain. */
10640 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10641
10642 /* Register an ELF OS ABI sniffer for ARM binaries. */
10643 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10644 bfd_target_elf_flavour,
10645 arm_elf_osabi_sniffer);
10646
10647 /* Add root prefix command for all "set arm"/"show arm" commands. */
10648 add_setshow_prefix_cmd ("arm", no_class,
10649 _("Various ARM-specific commands."),
10650 _("Various ARM-specific commands."),
10651 &setarmcmdlist, &showarmcmdlist,
10652 &setlist, &showlist);
10653
10654 arm_disassembler_options = xstrdup ("reg-names-std");
10655 const disasm_options_t *disasm_options
10656 = &disassembler_options_arm ()->options;
10657 int num_disassembly_styles = 0;
10658 for (i = 0; disasm_options->name[i] != NULL; i++)
10659 if (startswith (disasm_options->name[i], "reg-names-"))
10660 num_disassembly_styles++;
10661
10662 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
10663 valid_disassembly_styles = XNEWVEC (const char *,
10664 num_disassembly_styles + 1);
10665 for (i = j = 0; disasm_options->name[i] != NULL; i++)
10666 if (startswith (disasm_options->name[i], "reg-names-"))
10667 {
10668 size_t offset = strlen ("reg-names-");
10669 const char *style = disasm_options->name[i];
10670 valid_disassembly_styles[j++] = &style[offset];
10671 if (strcmp (&style[offset], "std") == 0)
10672 disassembly_style = &style[offset];
10673 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
10674 disasm_options->description[i]);
10675 rdptr += length;
10676 rest -= length;
10677 }
10678 /* Mark the end of valid options. */
10679 valid_disassembly_styles[num_disassembly_styles] = NULL;
10680
10681 /* Create the help text. */
10682 std::string helptext = string_printf ("%s%s%s",
10683 _("The valid values are:\n"),
10684 regdesc,
10685 _("The default is \"std\"."));
10686
10687 add_setshow_enum_cmd("disassembler", no_class,
10688 valid_disassembly_styles, &disassembly_style,
10689 _("Set the disassembly style."),
10690 _("Show the disassembly style."),
10691 helptext.c_str (),
10692 set_disassembly_style_sfunc,
10693 show_disassembly_style_sfunc,
10694 &setarmcmdlist, &showarmcmdlist);
10695
10696 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10697 _("Set usage of ARM 32-bit mode."),
10698 _("Show usage of ARM 32-bit mode."),
10699 _("When off, a 26-bit PC will be used."),
10700 NULL,
10701 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10702 mode is %s. */
10703 &setarmcmdlist, &showarmcmdlist);
10704
10705 /* Add a command to allow the user to force the FPU model. */
10706 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10707 _("Set the floating point type."),
10708 _("Show the floating point type."),
10709 _("auto - Determine the FP typefrom the OS-ABI.\n\
10710 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10711 fpa - FPA co-processor (GCC compiled).\n\
10712 softvfp - Software FP with pure-endian doubles.\n\
10713 vfp - VFP co-processor."),
10714 set_fp_model_sfunc, show_fp_model,
10715 &setarmcmdlist, &showarmcmdlist);
10716
10717 /* Add a command to allow the user to force the ABI. */
10718 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10719 _("Set the ABI."),
10720 _("Show the ABI."),
10721 NULL, arm_set_abi, arm_show_abi,
10722 &setarmcmdlist, &showarmcmdlist);
10723
10724 /* Add two commands to allow the user to force the assumed
10725 execution mode. */
10726 add_setshow_enum_cmd ("fallback-mode", class_support,
10727 arm_mode_strings, &arm_fallback_mode_string,
10728 _("Set the mode assumed when symbols are unavailable."),
10729 _("Show the mode assumed when symbols are unavailable."),
10730 NULL, NULL, arm_show_fallback_mode,
10731 &setarmcmdlist, &showarmcmdlist);
10732 add_setshow_enum_cmd ("force-mode", class_support,
10733 arm_mode_strings, &arm_force_mode_string,
10734 _("Set the mode assumed even when symbols are available."),
10735 _("Show the mode assumed even when symbols are available."),
10736 NULL, NULL, arm_show_force_mode,
10737 &setarmcmdlist, &showarmcmdlist);
10738
10739 /* Add a command to stop triggering security exceptions when
10740 unwinding exception stacks. */
10741 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
10742 _("Set usage of non-secure to secure exception stack unwinding."),
10743 _("Show usage of non-secure to secure exception stack unwinding."),
10744 _("When on, the debugger can trigger memory access traps."),
10745 NULL, arm_show_unwind_secure_frames,
10746 &setarmcmdlist, &showarmcmdlist);
10747
10748 /* Debugging flag. */
10749 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10750 _("Set ARM debugging."),
10751 _("Show ARM debugging."),
10752 _("When on, arm-specific debugging is enabled."),
10753 NULL,
10754 NULL, /* FIXME: i18n: "ARM debugging is %s." */
10755 &setdebuglist, &showdebuglist);
10756
10757 #if GDB_SELF_TEST
10758 selftests::register_test ("arm-record", selftests::arm_record_test);
10759 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
10760 #endif
10761
10762 }
10763
10764 /* ARM-reversible process record data structures. */
10765
10766 #define ARM_INSN_SIZE_BYTES 4
10767 #define THUMB_INSN_SIZE_BYTES 2
10768 #define THUMB2_INSN_SIZE_BYTES 4
10769
10770
10771 /* Position of the bit within a 32-bit ARM instruction
10772 that defines whether the instruction is a load or store. */
10773 #define INSN_S_L_BIT_NUM 20
10774
10775 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10776 do \
10777 { \
10778 unsigned int reg_len = LENGTH; \
10779 if (reg_len) \
10780 { \
10781 REGS = XNEWVEC (uint32_t, reg_len); \
10782 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10783 } \
10784 } \
10785 while (0)
10786
10787 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10788 do \
10789 { \
10790 unsigned int mem_len = LENGTH; \
10791 if (mem_len) \
10792 { \
10793 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10794 memcpy(&MEMS->len, &RECORD_BUF[0], \
10795 sizeof(struct arm_mem_r) * LENGTH); \
10796 } \
10797 } \
10798 while (0)
10799
10800 /* Checks whether the insn has already been recorded, i.e. whether it has any register or memory records attached (boolean expression). */
10801 #define INSN_RECORDED(ARM_RECORD) \
10802 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
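/* Usage sketch (illustrative only, not part of the recorder): a record
   handler collects the registers and the {length, address} pairs of
   memory an insn will clobber into local buffers, sets the counts and
   hands them to the allocators above, e.g.

       uint32_t record_buf[8], record_buf_mem[8];

       record_buf[0] = ARM_PS_REGNUM;
       arm_insn_r->reg_rec_count = 1;
       record_buf_mem[0] = 4;
       record_buf_mem[1] = tgt_mem_addr;
       arm_insn_r->mem_rec_count = 1;
       REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
		  record_buf);
       MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
		  record_buf_mem);

   Here "tgt_mem_addr" stands for whatever address the handler computed;
   the memory buffer layout matches struct arm_mem_r below.  */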
10803
10804 /* ARM memory record structure. */
10805 struct arm_mem_r
10806 {
10807 uint32_t len; /* Record length. */
10808 uint32_t addr; /* Memory address. */
10809 };
10810
10811 /* An ARM instruction record contains the opcode and execution state
10812 of the current insn (filled in before entry to decode_insn()) and
10813 the list of to-be-modified registers and
10814 memory blocks (filled in on return from decode_insn()). */
10815
10816 struct arm_insn_decode_record
10817 {
10818 struct gdbarch *gdbarch;
10819 struct regcache *regcache;
10820 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10821 uint32_t arm_insn; /* Should accommodate thumb. */
10822 uint32_t cond; /* Condition code. */
10823 uint32_t opcode; /* Insn opcode. */
10824 uint32_t decode; /* Insn decode bits. */
10825 uint32_t mem_rec_count; /* Number of memory records. */
10826 uint32_t reg_rec_count; /* Number of register records. */
10827 uint32_t *arm_regs; /* Registers to be saved for this record. */
10828 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10829 };
10830
10831
10832 /* Checks ARM SBZ (should-be-zero) and SBO (should-be-one) mandatory fields. BIT_NUM is the 1-based position of the field's lowest bit and LEN its width in bits; SBO is non-zero when checking a should-be-one field. */
10833
10834 static int
10835 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10836 {
10837 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
10838
10839 if (!len)
10840 return 1;
10841
10842 if (!sbo)
10843 ones = ~ones;
10844
10845 while (ones)
10846 {
10847 if (!(ones & sbo))
10848 {
10849 return 0;
10850 }
10851 ones = ones >> 1;
10852 }
10853 return 1;
10854 }
10855
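/* A worked reading of the interface above (illustrative): the BX
   decoding later in this file calls sbo_sbz (insn, 9, 12, 1), i.e.
   BIT_NUM 9 and LEN 12 select bits 8-19, which must all be one in a
   BX encoding such as 0xe12fff13 (BX r3), where bits 8-19 are 0xfff.  */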
10856 enum arm_record_result
10857 {
10858 ARM_RECORD_SUCCESS = 0,
10859 ARM_RECORD_FAILURE = 1
10860 };
10861
10862 enum arm_record_strx_t
10863 {
10864 ARM_RECORD_STRH=1,
10865 ARM_RECORD_STRD
10866 };
10867
10868 enum record_type_t
10869 {
10870 ARM_RECORD=1,
10871 THUMB_RECORD,
10872 THUMB2_RECORD
10873 };
10874
10875
10876 static int
10877 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
10878 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10879 {
10880
10881 struct regcache *reg_cache = arm_insn_r->regcache;
10882 ULONGEST u_regval[2]= {0};
10883
10884 uint32_t reg_src1 = 0, reg_src2 = 0;
10885 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10886
10887 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10888 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10889
10890 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10891 {
10892 /* 1) Handle misc store, immediate offset. */
10893 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10894 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10895 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10896 regcache_raw_read_unsigned (reg_cache, reg_src1,
10897 &u_regval[0]);
10898 if (ARM_PC_REGNUM == reg_src1)
10899 {
10900 /* If R15 was used as Rn, the value read is the current PC + 8. */
10901 u_regval[0] = u_regval[0] + 8;
10902 }
10903 offset_8 = (immed_high << 4) | immed_low;
10904 /* Calculate target store address. */
10905 if (14 == arm_insn_r->opcode)
10906 {
10907 tgt_mem_addr = u_regval[0] + offset_8;
10908 }
10909 else
10910 {
10911 tgt_mem_addr = u_regval[0] - offset_8;
10912 }
10913 if (ARM_RECORD_STRH == str_type)
10914 {
10915 record_buf_mem[0] = 2;
10916 record_buf_mem[1] = tgt_mem_addr;
10917 arm_insn_r->mem_rec_count = 1;
10918 }
10919 else if (ARM_RECORD_STRD == str_type)
10920 {
10921 record_buf_mem[0] = 4;
10922 record_buf_mem[1] = tgt_mem_addr;
10923 record_buf_mem[2] = 4;
10924 record_buf_mem[3] = tgt_mem_addr + 4;
10925 arm_insn_r->mem_rec_count = 2;
10926 }
10927 }
10928 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10929 {
10930 /* 2) Store, register offset. */
10931 /* Get Rm. */
10932 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10933 /* Get Rn. */
10934 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10935 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10936 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10937 if (15 == reg_src2)
10938 {
10939 /* If R15 was used as Rn, the value read is the current PC + 8. */
10940 u_regval[0] = u_regval[0] + 8;
10941 }
10942 /* Calculate target store address, Rn +/- Rm, register offset. */
10943 if (12 == arm_insn_r->opcode)
10944 {
10945 tgt_mem_addr = u_regval[0] + u_regval[1];
10946 }
10947 else
10948 {
10949 tgt_mem_addr = u_regval[1] - u_regval[0];
10950 }
10951 if (ARM_RECORD_STRH == str_type)
10952 {
10953 record_buf_mem[0] = 2;
10954 record_buf_mem[1] = tgt_mem_addr;
10955 arm_insn_r->mem_rec_count = 1;
10956 }
10957 else if (ARM_RECORD_STRD == str_type)
10958 {
10959 record_buf_mem[0] = 4;
10960 record_buf_mem[1] = tgt_mem_addr;
10961 record_buf_mem[2] = 4;
10962 record_buf_mem[3] = tgt_mem_addr + 4;
10963 arm_insn_r->mem_rec_count = 2;
10964 }
10965 }
10966 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10967 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10968 {
10969 /* 3) Store, immediate pre-indexed. */
10970 /* 5) Store, immediate post-indexed. */
10971 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10972 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10973 offset_8 = (immed_high << 4) | immed_low;
10974 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10975 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10976 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
10977 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10978 {
10979 tgt_mem_addr = u_regval[0] + offset_8;
10980 }
10981 else
10982 {
10983 tgt_mem_addr = u_regval[0] - offset_8;
10984 }
10985 if (ARM_RECORD_STRH == str_type)
10986 {
10987 record_buf_mem[0] = 2;
10988 record_buf_mem[1] = tgt_mem_addr;
10989 arm_insn_r->mem_rec_count = 1;
10990 }
10991 else if (ARM_RECORD_STRD == str_type)
10992 {
10993 record_buf_mem[0] = 4;
10994 record_buf_mem[1] = tgt_mem_addr;
10995 record_buf_mem[2] = 4;
10996 record_buf_mem[3] = tgt_mem_addr + 4;
10997 arm_insn_r->mem_rec_count = 2;
10998 }
10999 /* Record Rn also as it changes. */
11000 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11001 arm_insn_r->reg_rec_count = 1;
11002 }
11003 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11004 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11005 {
11006 /* 4) Store, register pre-indexed. */
11007 /* 6) Store, register post-indexed. */
11008 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11009 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11010 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11011 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11012 /* Calculate target store address, Rn +/- Rm, register offset. */
11013 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11014 {
11015 tgt_mem_addr = u_regval[0] + u_regval[1];
11016 }
11017 else
11018 {
11019 tgt_mem_addr = u_regval[1] - u_regval[0];
11020 }
11021 if (ARM_RECORD_STRH == str_type)
11022 {
11023 record_buf_mem[0] = 2;
11024 record_buf_mem[1] = tgt_mem_addr;
11025 arm_insn_r->mem_rec_count = 1;
11026 }
11027 else if (ARM_RECORD_STRD == str_type)
11028 {
11029 record_buf_mem[0] = 4;
11030 record_buf_mem[1] = tgt_mem_addr;
11031 record_buf_mem[2] = 4;
11032 record_buf_mem[3] = tgt_mem_addr + 4;
11033 arm_insn_r->mem_rec_count = 2;
11034 }
11035 /* Record Rn also as it changes. */
11036 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11037 arm_insn_r->reg_rec_count = 1;
11038 }
11039 return 0;
11040 }
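/* Worked example for the routine above (illustrative): STRH r1, [r2, #6],
   encoded as 0xe1c210b6, has opcode bits 21-24 equal to 14 (P=1, U=1,
   I=1, W=0), so case 1 applies: offset_8 = immed_high:immed_low = 6,
   the target address is R2 + 6, and a single 2-byte memory record is
   emitted.  */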
11041
11042 /* Handling ARM extension space insns. */
11043
11044 static int
11045 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11046 {
11047 int ret = 0; /* Return value: -1: record failure; 0: success. */
11048 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11049 uint32_t record_buf[8], record_buf_mem[8];
11050 uint32_t reg_src1 = 0;
11051 struct regcache *reg_cache = arm_insn_r->regcache;
11052 ULONGEST u_regval = 0;
11053
11054 gdb_assert (!INSN_RECORDED(arm_insn_r));
11055 /* Handle unconditional insn extension space. */
11056
11057 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11058 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11059 if (arm_insn_r->cond)
11060 {
11061 /* PLD has no effect on architectural state, it just affects
11062 the caches. */
11063 if (5 == ((opcode1 & 0xE0) >> 5))
11064 {
11065 /* BLX(1) */
11066 record_buf[0] = ARM_PS_REGNUM;
11067 record_buf[1] = ARM_LR_REGNUM;
11068 arm_insn_r->reg_rec_count = 2;
11069 }
11070 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11071 }
11072
11073
11074 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11075 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11076 {
11077 ret = -1;
11078 /* Undefined instruction on ARM V5; need to handle if later
11079 versions define it. */
11080 }
11081
11082 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11083 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11084 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11085
11086 /* Handle arithmetic insn extension space. */
11087 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11088 && !INSN_RECORDED(arm_insn_r))
11089 {
11090 /* Handle MLA(S) and MUL(S). */
11091 if (in_inclusive_range (insn_op1, 0U, 3U))
11092 {
11093 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11094 record_buf[1] = ARM_PS_REGNUM;
11095 arm_insn_r->reg_rec_count = 2;
11096 }
11097 else if (in_inclusive_range (insn_op1, 4U, 15U))
11098 {
11099 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11100 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11101 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11102 record_buf[2] = ARM_PS_REGNUM;
11103 arm_insn_r->reg_rec_count = 3;
11104 }
11105 }
11106
11107 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11108 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11109 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11110
11111 /* Handle control insn extension space. */
11112
11113 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11114 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11115 {
11116 if (!bit (arm_insn_r->arm_insn,25))
11117 {
11118 if (!bits (arm_insn_r->arm_insn, 4, 7))
11119 {
11120 if ((0 == insn_op1) || (2 == insn_op1))
11121 {
11122 /* MRS. */
11123 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11124 arm_insn_r->reg_rec_count = 1;
11125 }
11126 else if (1 == insn_op1)
11127 {
11128 /* CPSR is going to be changed. */
11129 record_buf[0] = ARM_PS_REGNUM;
11130 arm_insn_r->reg_rec_count = 1;
11131 }
11132 else if (3 == insn_op1)
11133 {
11134 /* SPSR is going to be changed. */
11135 /* We need to get SPSR value, which is yet to be done. */
11136 return -1;
11137 }
11138 }
11139 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11140 {
11141 if (1 == insn_op1)
11142 {
11143 /* BX. */
11144 record_buf[0] = ARM_PS_REGNUM;
11145 arm_insn_r->reg_rec_count = 1;
11146 }
11147 else if (3 == insn_op1)
11148 {
11149 /* CLZ. */
11150 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11151 arm_insn_r->reg_rec_count = 1;
11152 }
11153 }
11154 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11155 {
11156 /* BLX. */
11157 record_buf[0] = ARM_PS_REGNUM;
11158 record_buf[1] = ARM_LR_REGNUM;
11159 arm_insn_r->reg_rec_count = 2;
11160 }
11161 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11162 {
11163 /* QADD, QSUB, QDADD, QDSUB */
11164 record_buf[0] = ARM_PS_REGNUM;
11165 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11166 arm_insn_r->reg_rec_count = 2;
11167 }
11168 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11169 {
11170 /* BKPT. */
11171 record_buf[0] = ARM_PS_REGNUM;
11172 record_buf[1] = ARM_LR_REGNUM;
11173 arm_insn_r->reg_rec_count = 2;
11174
11175 /* Save SPSR also; how? */
11176 return -1;
11177 }
11178 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11179 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11180 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11181 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11182 )
11183 {
11184 if (0 == insn_op1 || 1 == insn_op1)
11185 {
11186 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11187 /* We don't do optimization for SMULW<y>, where we
11188 need only Rd. */
11189 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11190 record_buf[1] = ARM_PS_REGNUM;
11191 arm_insn_r->reg_rec_count = 2;
11192 }
11193 else if (2 == insn_op1)
11194 {
11195 /* SMLAL<x><y>. */
11196 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11197 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11198 arm_insn_r->reg_rec_count = 2;
11199 }
11200 else if (3 == insn_op1)
11201 {
11202 /* SMUL<x><y>. */
11203 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11204 arm_insn_r->reg_rec_count = 1;
11205 }
11206 }
11207 }
11208 else
11209 {
11210 /* MSR : immediate form. */
11211 if (1 == insn_op1)
11212 {
11213 /* CPSR is going to be changed. */
11214 record_buf[0] = ARM_PS_REGNUM;
11215 arm_insn_r->reg_rec_count = 1;
11216 }
11217 else if (3 == insn_op1)
11218 {
11219 /* SPSR is going to be changed. */
11220 /* We need to get the SPSR value, which is yet to be done. */
11221 return -1;
11222 }
11223 }
11224 }
11225
11226 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11227 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11228 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11229
11230 /* Handle load/store insn extension space. */
11231
11232 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11233 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11234 && !INSN_RECORDED(arm_insn_r))
11235 {
11236 /* SWP/SWPB. */
11237 if (0 == insn_op1)
11238 {
11239 /* This insn changes both a register and memory. */
11240 /* SWP or SWPB insn. */
11241 /* Get memory address given by Rn. */
11242 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11243 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11244 /* SWP insn swaps a word. */
11245 if (8 == arm_insn_r->opcode)
11246 {
11247 record_buf_mem[0] = 4;
11248 }
11249 else
11250 {
11251 /* SWPB insn swaps only a byte. */
11252 record_buf_mem[0] = 1;
11253 }
11254 record_buf_mem[1] = u_regval;
11255 arm_insn_r->mem_rec_count = 1;
11256 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11257 arm_insn_r->reg_rec_count = 1;
11258 }
11259 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11260 {
11261 /* STRH. */
11262 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11263 ARM_RECORD_STRH);
11264 }
11265 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11266 {
11267 /* LDRD. */
11268 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11269 record_buf[1] = record_buf[0] + 1;
11270 arm_insn_r->reg_rec_count = 2;
11271 }
11272 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11273 {
11274 /* STRD. */
11275 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11276 ARM_RECORD_STRD);
11277 }
11278 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11279 {
11280 /* LDRH, LDRSB, LDRSH. */
11281 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11282 arm_insn_r->reg_rec_count = 1;
11283 }
11284
11285 }
11286
11287 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11288 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11289 && !INSN_RECORDED(arm_insn_r))
11290 {
11291 ret = -1;
11292 /* Handle coprocessor insn extension space. */
11293 }
11294
11295 /* To be done for ARMv5 and later; as of now we return -1. */
11296 if (-1 == ret)
11297 return ret;
11298
11299 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11300 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11301
11302 return ret;
11303 }
11304
11305 /* Handling opcode 000 insns. */
11306
11307 static int
11308 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11309 {
11310 struct regcache *reg_cache = arm_insn_r->regcache;
11311 uint32_t record_buf[8], record_buf_mem[8];
11312 ULONGEST u_regval[2] = {0};
11313
11314 uint32_t reg_src1 = 0;
11315 uint32_t opcode1 = 0;
11316
11317 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11318 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11319 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11320
11321 if (!((opcode1 & 0x19) == 0x10))
11322 {
11323 /* Data-processing (register) and data-processing (register-shifted
11324 register). */
11325 /* In all 11 shifter-operand modes the insn modifies the destination
11326 register, which is specified by bits 12-15. */
11327 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11328 record_buf[1] = ARM_PS_REGNUM;
11329 arm_insn_r->reg_rec_count = 2;
11330 }
11331 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11332 {
11333 /* Miscellaneous instructions */
11334
11335 if (3 == arm_insn_r->decode && 0x12 == opcode1
11336 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11337 {
11338 /* Handle BLX, branch and link/exchange. */
11339 if (9 == arm_insn_r->opcode)
11340 {
11341 /* The branch target state is chosen by bit[0] of Rm, which sets the
11342 T bit of CPSR, and R14 stores the return address. */
11343 record_buf[0] = ARM_PS_REGNUM;
11344 record_buf[1] = ARM_LR_REGNUM;
11345 arm_insn_r->reg_rec_count = 2;
11346 }
11347 }
11348 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11349 {
11350 /* Handle enhanced software breakpoint insn, BKPT. */
11351 /* CPSR is changed so execution continues in ARM state with normal
11352 interrupts disabled, entering abort mode. */
11353 /* The PC is set according to the high vector configuration. */
11354 /* If the user hit the breakpoint and then reverses, we need to
11355 go back with the previous CPSR and
11356 program counter. */
11357 record_buf[0] = ARM_PS_REGNUM;
11358 record_buf[1] = ARM_LR_REGNUM;
11359 arm_insn_r->reg_rec_count = 2;
11360
11361 /* Save SPSR also; how? */
11362 return -1;
11363 }
11364 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11365 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11366 {
11367 /* Handle BX, branch and link/exchange. */
11368 /* The branch target state is chosen by bit[0] of Rm, which sets the T bit of CPSR. */
11369 record_buf[0] = ARM_PS_REGNUM;
11370 arm_insn_r->reg_rec_count = 1;
11371 }
11372 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11373 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11374 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11375 {
11376 /* Count leading zeros: CLZ. */
11377 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11378 arm_insn_r->reg_rec_count = 1;
11379 }
11380 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11381 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11382 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11383 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11384 {
11385 /* Handle MRS insn. */
11386 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11387 arm_insn_r->reg_rec_count = 1;
11388 }
11389 }
11390 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11391 {
11392 /* Multiply and multiply-accumulate */
11393
11394 /* Handle multiply instructions. */
11395 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11396 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11397 {
11398 /* Handle MLA and MUL. */
11399 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11400 record_buf[1] = ARM_PS_REGNUM;
11401 arm_insn_r->reg_rec_count = 2;
11402 }
11403 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11404 {
11405 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11406 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11407 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11408 record_buf[2] = ARM_PS_REGNUM;
11409 arm_insn_r->reg_rec_count = 3;
11410 }
11411 }
11412 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11413 {
11414 /* Synchronization primitives */
11415
11416 /* Handling SWP, SWPB. */
11417 /* This insn changes both a register and memory. */
11418 /* SWP or SWPB insn. */
11419
11420 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11421 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11422 /* SWP insn swaps a word. */
11423 if (8 == arm_insn_r->opcode)
11424 {
11425 record_buf_mem[0] = 4;
11426 }
11427 else
11428 {
11429 /* SWPB insn swaps only a byte. */
11430 record_buf_mem[0] = 1;
11431 }
11432 record_buf_mem[1] = u_regval[0];
11433 arm_insn_r->mem_rec_count = 1;
11434 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11435 arm_insn_r->reg_rec_count = 1;
11436 }
11437 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11438 || 15 == arm_insn_r->decode)
11439 {
11440 if ((opcode1 & 0x12) == 2)
11441 {
11442 /* Extra load/store (unprivileged) */
11443 return -1;
11444 }
11445 else
11446 {
11447 /* Extra load/store */
11448 switch (bits (arm_insn_r->arm_insn, 5, 6))
11449 {
11450 case 1:
11451 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11452 {
11453 /* STRH (register), STRH (immediate) */
11454 arm_record_strx (arm_insn_r, &record_buf[0],
11455 &record_buf_mem[0], ARM_RECORD_STRH);
11456 }
11457 else if ((opcode1 & 0x05) == 0x1)
11458 {
11459 /* LDRH (register) */
11460 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11461 arm_insn_r->reg_rec_count = 1;
11462
11463 if (bit (arm_insn_r->arm_insn, 21))
11464 {
11465 /* Write back to Rn. */
11466 record_buf[arm_insn_r->reg_rec_count++]
11467 = bits (arm_insn_r->arm_insn, 16, 19);
11468 }
11469 }
11470 else if ((opcode1 & 0x05) == 0x5)
11471 {
11472 /* LDRH (immediate), LDRH (literal) */
11473 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11474
11475 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11476 arm_insn_r->reg_rec_count = 1;
11477
11478 if (rn != 15)
11479 {
11480 /*LDRH (immediate) */
11481 if (bit (arm_insn_r->arm_insn, 21))
11482 {
11483 /* Write back to Rn. */
11484 record_buf[arm_insn_r->reg_rec_count++] = rn;
11485 }
11486 }
11487 }
11488 else
11489 return -1;
11490 break;
11491 case 2:
11492 if ((opcode1 & 0x05) == 0x0)
11493 {
11494 /* LDRD (register) */
11495 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11496 record_buf[1] = record_buf[0] + 1;
11497 arm_insn_r->reg_rec_count = 2;
11498
11499 if (bit (arm_insn_r->arm_insn, 21))
11500 {
11501 /* Write back to Rn. */
11502 record_buf[arm_insn_r->reg_rec_count++]
11503 = bits (arm_insn_r->arm_insn, 16, 19);
11504 }
11505 }
11506 else if ((opcode1 & 0x05) == 0x1)
11507 {
11508 /* LDRSB (register) */
11509 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11510 arm_insn_r->reg_rec_count = 1;
11511
11512 if (bit (arm_insn_r->arm_insn, 21))
11513 {
11514 /* Write back to Rn. */
11515 record_buf[arm_insn_r->reg_rec_count++]
11516 = bits (arm_insn_r->arm_insn, 16, 19);
11517 }
11518 }
11519 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11520 {
11521 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11522 LDRSB (literal) */
11523 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11524
11525 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11526 arm_insn_r->reg_rec_count = 1;
11527
11528 if (rn != 15)
11529 {
11530 /*LDRD (immediate), LDRSB (immediate) */
11531 if (bit (arm_insn_r->arm_insn, 21))
11532 {
11533 /* Write back to Rn. */
11534 record_buf[arm_insn_r->reg_rec_count++] = rn;
11535 }
11536 }
11537 }
11538 else
11539 return -1;
11540 break;
11541 case 3:
11542 if ((opcode1 & 0x05) == 0x0)
11543 {
11544 /* STRD (register) */
11545 arm_record_strx (arm_insn_r, &record_buf[0],
11546 &record_buf_mem[0], ARM_RECORD_STRD);
11547 }
11548 else if ((opcode1 & 0x05) == 0x1)
11549 {
11550 /* LDRSH (register) */
11551 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11552 arm_insn_r->reg_rec_count = 1;
11553
11554 if (bit (arm_insn_r->arm_insn, 21))
11555 {
11556 /* Write back to Rn. */
11557 record_buf[arm_insn_r->reg_rec_count++]
11558 = bits (arm_insn_r->arm_insn, 16, 19);
11559 }
11560 }
11561 else if ((opcode1 & 0x05) == 0x4)
11562 {
11563 /* STRD (immediate) */
11564 arm_record_strx (arm_insn_r, &record_buf[0],
11565 &record_buf_mem[0], ARM_RECORD_STRD);
11566 }
11567 else if ((opcode1 & 0x05) == 0x5)
11568 {
11569 /* LDRSH (immediate), LDRSH (literal) */
11570 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11571 arm_insn_r->reg_rec_count = 1;
11572
11573 if (bit (arm_insn_r->arm_insn, 21))
11574 {
11575 /* Write back to Rn. */
11576 record_buf[arm_insn_r->reg_rec_count++]
11577 = bits (arm_insn_r->arm_insn, 16, 19);
11578 }
11579 }
11580 else
11581 return -1;
11582 break;
11583 default:
11584 return -1;
11585 }
11586 }
11587 }
11588 else
11589 {
11590 return -1;
11591 }
11592
11593 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11594 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11595 return 0;
11596 }
11597
11598 /* Handling opcode 001 insns. */
11599
11600 static int
11601 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11602 {
11603 uint32_t record_buf[8], record_buf_mem[8];
11604
11605 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11606 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11607
11608 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11609 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11610 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11611 )
11612 {
11613 /* Handle MSR insn. */
11614 if (9 == arm_insn_r->opcode)
11615 {
11616 /* CPSR is going to be changed. */
11617 record_buf[0] = ARM_PS_REGNUM;
11618 arm_insn_r->reg_rec_count = 1;
11619 }
11620 else
11621 {
11622 /* SPSR is going to be changed. */
11623 }
11624 }
11625 else if (arm_insn_r->opcode <= 15)
11626 {
11627 /* Normal data processing insns. */
11628 /* In all 11 shifter-operand modes the insn modifies the destination
11629 register, which is specified by bits 12-15. */
11630 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11631 record_buf[1] = ARM_PS_REGNUM;
11632 arm_insn_r->reg_rec_count = 2;
11633 }
11634 else
11635 {
11636 return -1;
11637 }
11638
11639 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11640 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11641 return 0;
11642 }
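/* For instance (illustrative): MSR CPSR_f, #imm matches the first branch
   above (opcode 9) and records only CPSR, while an ordinary immediate
   data-processing insn such as ADD r0, r1, #1 takes the second branch
   and records r0 (bits 12-15) together with CPSR.  */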
11643
11644 static int
11645 arm_record_media (arm_insn_decode_record *arm_insn_r)
11646 {
11647 uint32_t record_buf[8];
11648
11649 switch (bits (arm_insn_r->arm_insn, 22, 24))
11650 {
11651 case 0:
11652 /* Parallel addition and subtraction, signed */
11653 case 1:
11654 /* Parallel addition and subtraction, unsigned */
11655 case 2:
11656 case 3:
11657 /* Packing, unpacking, saturation and reversal */
11658 {
11659 int rd = bits (arm_insn_r->arm_insn, 12, 15);
11660
11661 record_buf[arm_insn_r->reg_rec_count++] = rd;
11662 }
11663 break;
11664
11665 case 4:
11666 case 5:
11667 /* Signed multiplies */
11668 {
11669 int rd = bits (arm_insn_r->arm_insn, 16, 19);
11670 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
11671
11672 record_buf[arm_insn_r->reg_rec_count++] = rd;
11673 if (op1 == 0x0)
11674 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11675 else if (op1 == 0x4)
11676 record_buf[arm_insn_r->reg_rec_count++]
11677 = bits (arm_insn_r->arm_insn, 12, 15);
11678 }
11679 break;
11680
11681 case 6:
11682 {
11683 if (bit (arm_insn_r->arm_insn, 21)
11684 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
11685 {
11686 /* SBFX */
11687 record_buf[arm_insn_r->reg_rec_count++]
11688 = bits (arm_insn_r->arm_insn, 12, 15);
11689 }
11690 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
11691 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
11692 {
11693 /* USAD8 and USADA8 */
11694 record_buf[arm_insn_r->reg_rec_count++]
11695 = bits (arm_insn_r->arm_insn, 16, 19);
11696 }
11697 }
11698 break;
11699
11700 case 7:
11701 {
11702 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
11703 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
11704 {
11705 /* Permanently UNDEFINED */
11706 return -1;
11707 }
11708 else
11709 {
11710 /* BFC, BFI and UBFX */
11711 record_buf[arm_insn_r->reg_rec_count++]
11712 = bits (arm_insn_r->arm_insn, 12, 15);
11713 }
11714 }
11715 break;
11716
11717 default:
11718 return -1;
11719 }
11720
11721 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11722
11723 return 0;
11724 }
11725
11726 /* Handle ARM mode instructions with opcode 010. */
11727
11728 static int
11729 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
11730 {
11731 struct regcache *reg_cache = arm_insn_r->regcache;
11732
11733 uint32_t reg_base , reg_dest;
11734 uint32_t offset_12, tgt_mem_addr;
11735 uint32_t record_buf[8], record_buf_mem[8];
11736 unsigned char wback;
11737 ULONGEST u_regval;
11738
11739 /* Calculate wback: writeback happens when P == 0 (post-indexed) or W == 1. */
11740 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11741 || (bit (arm_insn_r->arm_insn, 21) == 1);
11742
11743 arm_insn_r->reg_rec_count = 0;
11744 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11745
11746 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11747 {
11748 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11749 and LDRT. */
11750
11751 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11752 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11753
11754 /* The LDR instruction is capable of branching. If MOV LR, PC
11755 precedes an LDR instruction that loads into R15 (the PC), the pair
11756 emulates a branch and link instruction, and hence we need to save
11757 CPSR and PC as well. */
11758 if (ARM_PC_REGNUM == reg_dest)
11759 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11760
11761 /* If wback is true, also save the base register, which is going to be
11762 written to. */
11763 if (wback)
11764 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11765 }
11766 else
11767 {
11768 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11769
11770 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11771 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11772
11773 /* Handle bit U. */
11774 if (bit (arm_insn_r->arm_insn, 23))
11775 {
11776 /* U == 1: Add the offset. */
11777 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11778 }
11779 else
11780 {
11781 /* U == 0: subtract the offset. */
11782 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11783 }
11784
11785 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11786 bytes. */
11787 if (bit (arm_insn_r->arm_insn, 22))
11788 {
11789 /* STRB and STRBT: 1 byte. */
11790 record_buf_mem[0] = 1;
11791 }
11792 else
11793 {
11794 /* STR and STRT: 4 bytes. */
11795 record_buf_mem[0] = 4;
11796 }
11797
11798 /* Handle bit P. */
11799 if (bit (arm_insn_r->arm_insn, 24))
11800 record_buf_mem[1] = tgt_mem_addr;
11801 else
11802 record_buf_mem[1] = (uint32_t) u_regval;
11803
11804 arm_insn_r->mem_rec_count = 1;
11805
11806 /* If wback is true, also save the base register, which is going to be
11807 written to. */
11808 if (wback)
11809 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11810 }
11811
11812 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11813 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11814 return 0;
11815 }
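/* Worked example for the routine above (illustrative): STR r1, [r2, #-8]!
   has P=1, U=0 and W=1, so wback is true; the stored word lives at
   R2 - 8, giving one 4-byte memory record at that address, and the
   written-back base register r2 is recorded as well.  */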
11816
11817 /* Handling opcode 011 insns. */
11818
11819 static int
11820 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
11821 {
11822 struct regcache *reg_cache = arm_insn_r->regcache;
11823
11824 uint32_t shift_imm = 0;
11825 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11826 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11827 uint32_t record_buf[8], record_buf_mem[8];
11828
11829 LONGEST s_word;
11830 ULONGEST u_regval[2];
11831
11832 if (bit (arm_insn_r->arm_insn, 4))
11833 return arm_record_media (arm_insn_r);
11834
11835 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11836 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11837
11838 /* Handle enhanced store insns and LDRD DSP insn,
11839 order begins according to addressing modes for store insns
11840 STRH insn. */
11841
11842 /* LDR or STR? */
11843 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11844 {
11845 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11846 /* An LDR insn can branch when it loads into R15 (the PC); if it
11847 is preceded by MOV LR, PC, the pair
11848 emulates a branch and link insn, and hence we
11849 need to save CPSR and PC as well. */
11850 if (15 != reg_dest)
11851 {
11852 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11853 arm_insn_r->reg_rec_count = 1;
11854 }
11855 else
11856 {
11857 record_buf[0] = reg_dest;
11858 record_buf[1] = ARM_PS_REGNUM;
11859 arm_insn_r->reg_rec_count = 2;
11860 }
11861 }
11862 else
11863 {
11864 if (! bits (arm_insn_r->arm_insn, 4, 11))
11865 {
11866 /* Store insn, register offset and register pre-indexed,
11867 register post-indexed. */
11868 /* Get Rm. */
11869 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11870 /* Get Rn. */
11871 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11872 regcache_raw_read_unsigned (reg_cache, reg_src1
11873 , &u_regval[0]);
11874 regcache_raw_read_unsigned (reg_cache, reg_src2
11875 , &u_regval[1]);
11876 if (15 == reg_src2)
11877 {
11878 /* If R15 was used as Rn, the value read is the current PC + 8. */
11879 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
11880 u_regval[0] = u_regval[0] + 8;
11881 }
11882 /* Calculate target store address, Rn +/- Rm, register offset. */
11883 /* U == 1. */
11884 if (bit (arm_insn_r->arm_insn, 23))
11885 {
11886 tgt_mem_addr = u_regval[0] + u_regval[1];
11887 }
11888 else
11889 {
11890 tgt_mem_addr = u_regval[1] - u_regval[0];
11891 }
11892
11893 switch (arm_insn_r->opcode)
11894 {
11895 /* STR. */
11896 case 8:
11897 case 12:
11898 /* STR. */
11899 case 9:
11900 case 13:
11901 /* STRT. */
11902 case 1:
11903 case 5:
11904 /* STR. */
11905 case 0:
11906 case 4:
11907 record_buf_mem[0] = 4;
11908 break;
11909
11910 /* STRB. */
11911 case 10:
11912 case 14:
11913 /* STRB. */
11914 case 11:
11915 case 15:
11916 /* STRBT. */
11917 case 3:
11918 case 7:
11919 /* STRB. */
11920 case 2:
11921 case 6:
11922 record_buf_mem[0] = 1;
11923 break;
11924
11925 default:
11926 gdb_assert_not_reached ("no decoding pattern found");
11927 break;
11928 }
11929 record_buf_mem[1] = tgt_mem_addr;
11930 arm_insn_r->mem_rec_count = 1;
11931
11932 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11933 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11934 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11935 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11936 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11937 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11938 )
11939 {
11940 /* Rn is going to be changed in pre-indexed mode and
11941 post-indexed mode as well. */
11942 record_buf[0] = reg_src2;
11943 arm_insn_r->reg_rec_count = 1;
11944 }
11945 }
11946 else
11947 {
11948 /* Store insn, scaled register offset; scaled pre-indexed. */
11949 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11950 /* Get Rm. */
11951 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11952 /* Get Rn. */
11953 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11954 /* Get shift_imm. */
11955 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11956 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11957 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11958 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11959 /* offset_12 currently holds the shift type (bits 6-5): 0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR/RRX. */
11960 switch (offset_12)
11961 {
11962 case 0:
11963 /* LSL: offset_12 becomes Rm shifted left by shift_imm. */
11964 offset_12 = u_regval[0] << shift_imm;
11965 break;
11966
11967 case 1:
11968 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11969 break;
11970
11971 case 2:
11972 if (!shift_imm)
11973 {
11974 if (bit (u_regval[0], 31))
11975 {
11976 offset_12 = 0xFFFFFFFF;
11977 }
11978 else
11979 {
11980 offset_12 = 0;
11981 }
11982 }
11983 else
11984 {
11985 /* This is arithmetic shift. */
11986 offset_12 = s_word >> shift_imm;
11987 }
11988 break;
11989
11990 case 3:
11991 if (!shift_imm)
11992 {
11993 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11994 &u_regval[1]);
11995 /* RRX: the C flag (CPSR bit 29) becomes bit 31 and Rm is shifted right by one. */
11996 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11997 | (u_regval[0]) >> 1);
11998 }
11999 else
12000 {
12001 /* ROR: rotate Rm right by shift_imm bits. */
12002 offset_12 = ((u_regval[0] >> shift_imm)
12003 | (u_regval[0] << (32 - shift_imm)));
12004 }
12005 break;
12006
12007 default:
12008 gdb_assert_not_reached ("no decoding pattern found");
12009 break;
12010 }
12011
12012 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12013 /* bit U set. */
12014 if (bit (arm_insn_r->arm_insn, 23))
12015 {
12016 tgt_mem_addr = u_regval[1] + offset_12;
12017 }
12018 else
12019 {
12020 tgt_mem_addr = u_regval[1] - offset_12;
12021 }
12022
12023 switch (arm_insn_r->opcode)
12024 {
12025 /* STR. */
12026 case 8:
12027 case 12:
12028 /* STR. */
12029 case 9:
12030 case 13:
12031 /* STRT. */
12032 case 1:
12033 case 5:
12034 /* STR. */
12035 case 0:
12036 case 4:
12037 record_buf_mem[0] = 4;
12038 break;
12039
12040 /* STRB. */
12041 case 10:
12042 case 14:
12043 /* STRB. */
12044 case 11:
12045 case 15:
12046 /* STRBT. */
12047 case 3:
12048 case 7:
12049 /* STRB. */
12050 case 2:
12051 case 6:
12052 record_buf_mem[0] = 1;
12053 break;
12054
12055 default:
12056 gdb_assert_not_reached ("no decoding pattern found");
12057 break;
12058 }
12059 record_buf_mem[1] = tgt_mem_addr;
12060 arm_insn_r->mem_rec_count = 1;
12061
12062 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12063 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12064 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12065 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12066 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12067 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12068 )
12069 {
12070 /* Rn is going to be changed in register scaled pre-indexed
12071 mode, and in scaled post-indexed mode. */
12072 record_buf[0] = reg_src2;
12073 arm_insn_r->reg_rec_count = 1;
12074 }
12075 }
12076 }
12077
12078 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12079 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12080 return 0;
12081 }
12082
12083 /* Handle ARM mode instructions with opcode 100. */
12084
12085 static int
12086 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12087 {
12088 struct regcache *reg_cache = arm_insn_r->regcache;
12089 uint32_t register_count = 0, register_bits;
12090 uint32_t reg_base, addr_mode;
12091 uint32_t record_buf[24], record_buf_mem[48];
12092 uint32_t wback;
12093 ULONGEST u_regval;
12094
12095 /* Fetch the list of registers. */
12096 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12097 arm_insn_r->reg_rec_count = 0;
12098
12099 /* Fetch the base register that holds the address we are loading
12100 from or storing to. */
12101 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12102
12103 /* Calculate wback. */
12104 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
12105
12106 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12107 {
12108 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12109
12110 /* Find out which registers are going to be loaded from memory. */
12111 while (register_bits)
12112 {
12113 if (register_bits & 0x00000001)
12114 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12115 register_bits = register_bits >> 1;
12116 register_count++;
12117 }
12118
12119
12120 /* If wback is true, also save the base register, which is going to be
12121 written to. */
12122 if (wback)
12123 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12124
12125 /* Save the CPSR register. */
12126 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12127 }
12128 else
12129 {
12130 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12131
12132 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12133
12134 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12135
12136 /* Find out how many registers are going to be stored to memory. */
12137 while (register_bits)
12138 {
12139 if (register_bits & 0x00000001)
12140 register_count++;
12141 register_bits = register_bits >> 1;
12142 }
12143
12144 switch (addr_mode)
12145 {
12146 /* STMDA (STMED): Decrement after. */
12147 case 0:
12148 record_buf_mem[1] = (uint32_t) u_regval
12149 - register_count * ARM_INT_REGISTER_SIZE + 4;
12150 break;
12151 /* STM (STMIA, STMEA): Increment after. */
12152 case 1:
12153 record_buf_mem[1] = (uint32_t) u_regval;
12154 break;
12155 /* STMDB (STMFD): Decrement before. */
12156 case 2:
12157 record_buf_mem[1] = (uint32_t) u_regval
12158 - register_count * ARM_INT_REGISTER_SIZE;
12159 break;
12160 /* STMIB (STMFA): Increment before. */
12161 case 3:
12162 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12163 break;
12164 default:
12165 gdb_assert_not_reached ("no decoding pattern found");
12166 break;
12167 }
12168
12169 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12170 arm_insn_r->mem_rec_count = 1;
12171
12172 /* If wback is true, also save the base register, which is going to be
12173 written to. */
12174 if (wback)
12175 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12176 }
12177
12178 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12179 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12180 return 0;
12181 }
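/* Worked example for the routine above (illustrative): STMDB r13!,
   {r4-r6, lr} stores four registers, so the decrement-before case
   records one memory block of 4 * ARM_INT_REGISTER_SIZE bytes starting
   at SP - 16, and the written-back base register r13 is recorded as
   well.  */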
12182
12183 /* Handling opcode 101 insns. */
12184
12185 static int
12186 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12187 {
12188 uint32_t record_buf[8];
12189
12190 /* Handle B, BL, BLX(1) insns. */
12191 /* B simply branches so we do nothing here. */
12192 /* Note: BLX(1) doesn't fall here; it falls into the
12193 extension space instead. */
12194 if (bit (arm_insn_r->arm_insn, 24))
12195 {
12196 record_buf[0] = ARM_LR_REGNUM;
12197 arm_insn_r->reg_rec_count = 1;
12198 }
12199
12200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12201
12202 return 0;
12203 }
12204
12205 static int
12206 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12207 {
12208 gdb_printf (gdb_stderr,
12209 _("Process record does not support instruction "
12210 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12211 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12212
12213 return -1;
12214 }
12215
12216 /* Record handler for vector data transfer instructions. */
12217
12218 static int
12219 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12220 {
12221 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12222 uint32_t record_buf[4];
12223
12224 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12225 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12226 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12227 bit_l = bit (arm_insn_r->arm_insn, 20);
12228 bit_c = bit (arm_insn_r->arm_insn, 8);
12229
12230 /* Handle VMOV instruction. */
12231 if (bit_l && bit_c)
12232 {
12233 record_buf[0] = reg_t;
12234 arm_insn_r->reg_rec_count = 1;
12235 }
12236 else if (bit_l && !bit_c)
12237 {
12238 /* Handle VMOV instruction. */
12239 if (bits_a == 0x00)
12240 {
12241 record_buf[0] = reg_t;
12242 arm_insn_r->reg_rec_count = 1;
12243 }
12244 /* Handle VMRS instruction. */
12245 else if (bits_a == 0x07)
12246 {
12247 if (reg_t == 15)
12248 reg_t = ARM_PS_REGNUM;
12249
12250 record_buf[0] = reg_t;
12251 arm_insn_r->reg_rec_count = 1;
12252 }
12253 }
12254 else if (!bit_l && !bit_c)
12255 {
12256 /* Handle VMOV instruction. */
12257 if (bits_a == 0x00)
12258 {
12259 record_buf[0] = ARM_D0_REGNUM + reg_v;
12260
12261 arm_insn_r->reg_rec_count = 1;
12262 }
12263 /* Handle VMSR instruction. */
12264 else if (bits_a == 0x07)
12265 {
12266 record_buf[0] = ARM_FPSCR_REGNUM;
12267 arm_insn_r->reg_rec_count = 1;
12268 }
12269 }
12270 else if (!bit_l && bit_c)
12271 {
12272 /* Handle VMOV instruction. */
12273 if (!(bits_a & 0x04))
12274 {
12275 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12276 + ARM_D0_REGNUM;
12277 arm_insn_r->reg_rec_count = 1;
12278 }
12279 /* Handle VDUP instruction. */
12280 else
12281 {
12282 if (bit (arm_insn_r->arm_insn, 21))
12283 {
12284 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12285 record_buf[0] = reg_v + ARM_D0_REGNUM;
12286 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12287 arm_insn_r->reg_rec_count = 2;
12288 }
12289 else
12290 {
12291 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12292 record_buf[0] = reg_v + ARM_D0_REGNUM;
12293 arm_insn_r->reg_rec_count = 1;
12294 }
12295 }
12296 }
12297
12298 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12299 return 0;
12300 }
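/* For instance (illustrative): VMRS APSR_nzcv, FPSCR has bit L set,
   bit C clear and bits_a == 0x07 with Rt == 15, so the handler above
   substitutes ARM_PS_REGNUM and records CPSR; a plain VMRS r2, FPSCR
   would record r2 instead.  */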
12301
12302 /* Record handler for extension register load/store instructions. */
12303
12304 static int
12305 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12306 {
12307 uint32_t opcode, single_reg;
12308 uint8_t op_vldm_vstm;
12309 uint32_t record_buf[8], record_buf_mem[128];
12310 ULONGEST u_regval = 0;
12311
12312 struct regcache *reg_cache = arm_insn_r->regcache;
12313
12314 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12315 single_reg = !bit (arm_insn_r->arm_insn, 8);
12316 op_vldm_vstm = opcode & 0x1b;
12317
12318 /* Handle VMOV instructions. */
12319 if ((opcode & 0x1e) == 0x04)
12320 {
12321 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
12322 {
12323 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12324 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12325 arm_insn_r->reg_rec_count = 2;
12326 }
12327 else
12328 {
12329 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12330 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12331
12332 if (single_reg)
12333 {
12334 /* The first S register number m is REG_M:M (M is bit 5),
12335 the corresponding D register number is REG_M:M / 2, which
12336 is REG_M. */
12337 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12338 /* The second S register number is REG_M:M + 1, the
12339 corresponding D register number is (REG_M:M + 1) / 2.
12340 IOW, if bit M is 1, the first and second S registers
12341 are mapped to different D registers, otherwise, they are
12342 in the same D register. */
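	      /* For example (illustrative): with REG_M = 3 and M = 1 the two
		 S registers are s7 and s8, which live in d3 and d4, so both
		 D registers get recorded.  */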
12343 if (bit_m)
12344 {
12345 record_buf[arm_insn_r->reg_rec_count++]
12346 = ARM_D0_REGNUM + reg_m + 1;
12347 }
12348 }
12349 else
12350 {
12351 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12352 arm_insn_r->reg_rec_count = 1;
12353 }
12354 }
12355 }
12356 /* Handle VSTM and VPUSH instructions. */
12357 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12358 || op_vldm_vstm == 0x12)
12359 {
12360 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12361 uint32_t memory_index = 0;
12362
12363 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12364 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12365 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12366 imm_off32 = imm_off8 << 2;
12367 memory_count = imm_off8;
12368
12369 if (bit (arm_insn_r->arm_insn, 23))
12370 start_address = u_regval;
12371 else
12372 start_address = u_regval - imm_off32;
12373
12374 if (bit (arm_insn_r->arm_insn, 21))
12375 {
12376 record_buf[0] = reg_rn;
12377 arm_insn_r->reg_rec_count = 1;
12378 }
12379
12380 while (memory_count > 0)
12381 {
12382 if (single_reg)
12383 {
12384 record_buf_mem[memory_index] = 4;
12385 record_buf_mem[memory_index + 1] = start_address;
12386 start_address = start_address + 4;
12387 memory_index = memory_index + 2;
12388 }
12389 else
12390 {
12391 record_buf_mem[memory_index] = 4;
12392 record_buf_mem[memory_index + 1] = start_address;
12393 record_buf_mem[memory_index + 2] = 4;
12394 record_buf_mem[memory_index + 3] = start_address + 4;
12395 start_address = start_address + 8;
12396 memory_index = memory_index + 4;
12397 }
12398 memory_count--;
12399 }
12400 arm_insn_r->mem_rec_count = (memory_index >> 1);
12401 }
12402 /* Handle VLDM instructions. */
12403 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12404 || op_vldm_vstm == 0x13)
12405 {
12406 uint32_t reg_count, reg_vd;
12407 uint32_t reg_index = 0;
12408 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12409
12410 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12411 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12412
12413 /* REG_VD is the first D register number. If the instruction
12414 loads memory to S registers (SINGLE_REG is TRUE), the register
12415 number is (REG_VD << 1 | bit D), so the corresponding D
12416 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12417 if (!single_reg)
12418 reg_vd = reg_vd | (bit_d << 4);
12419
12420 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12421 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12422
12423 /* If the instruction loads memory into D registers, REG_COUNT should
12424 be divided by 2, according to the ARM Architecture Reference
12425 Manual. If it loads into S registers, divide by
12426 2 as well, because two S registers map onto one D register. */
12427 reg_count = reg_count / 2;
12428 if (single_reg && bit_d)
12429 {
12430 /* Increase the register count if S register list starts from
12431 an odd number (bit d is one). */
12432 reg_count++;
12433 }
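      /* For example (illustrative): a VLDM of s1-s4 has reg_count = 4 and
	 bit D set, giving 4 / 2 + 1 = 3 D registers (d0-d2) recorded by
	 the loop below.  */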
12434
12435 while (reg_count > 0)
12436 {
12437 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12438 reg_count--;
12439 }
12440 arm_insn_r->reg_rec_count = reg_index;
12441 }
12442 /* VSTR Vector store register. */
12443 else if ((opcode & 0x13) == 0x10)
12444 {
12445 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12446 uint32_t memory_index = 0;
12447
12448 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12449 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12450 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12451 imm_off32 = imm_off8 << 2;
12452
12453 if (bit (arm_insn_r->arm_insn, 23))
12454 start_address = u_regval + imm_off32;
12455 else
12456 start_address = u_regval - imm_off32;
12457
12458 if (single_reg)
12459 {
12460 record_buf_mem[memory_index] = 4;
12461 record_buf_mem[memory_index + 1] = start_address;
12462 arm_insn_r->mem_rec_count = 1;
12463 }
12464 else
12465 {
12466 record_buf_mem[memory_index] = 4;
12467 record_buf_mem[memory_index + 1] = start_address;
12468 record_buf_mem[memory_index + 2] = 4;
12469 record_buf_mem[memory_index + 3] = start_address + 4;
12470 arm_insn_r->mem_rec_count = 2;
12471 }
12472 }
12473 /* VLDR Vector load register. */
12474 else if ((opcode & 0x13) == 0x11)
12475 {
12476 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12477
12478 if (!single_reg)
12479 {
12480 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12481 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12482 }
12483 else
12484 {
12485 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12486 /* Record register D rather than pseudo register S. */
12487 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12488 }
12489 arm_insn_r->reg_rec_count = 1;
12490 }
12491
12492 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12493 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12494 return 0;
12495 }
12496
12497 /* Record handler for arm/thumb mode VFP data processing instructions. */
12498
12499 static int
12500 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12501 {
12502 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12503 uint32_t record_buf[4];
12504 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12505 enum insn_types curr_insn_type = INSN_INV;
12506
12507 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12508 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12509 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12510 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12511 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
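  /* DP_OP_SZ is the sz bit: set for double-precision operands, clear for
     single precision.  */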
12512 bit_d = bit (arm_insn_r->arm_insn, 22);
12513 /* Mask off the "D" bit. */
12514 opc1 = opc1 & ~0x04;
12515
12516 /* Handle VMLA, VMLS. */
12517 if (opc1 == 0x00)
12518 {
12519 if (bit (arm_insn_r->arm_insn, 10))
12520 {
12521 if (bit (arm_insn_r->arm_insn, 6))
12522 curr_insn_type = INSN_T0;
12523 else
12524 curr_insn_type = INSN_T1;
12525 }
12526 else
12527 {
12528 if (dp_op_sz)
12529 curr_insn_type = INSN_T1;
12530 else
12531 curr_insn_type = INSN_T2;
12532 }
12533 }
12534 /* Handle VNMLA, VNMLS, VNMUL. */
12535 else if (opc1 == 0x01)
12536 {
12537 if (dp_op_sz)
12538 curr_insn_type = INSN_T1;
12539 else
12540 curr_insn_type = INSN_T2;
12541 }
12542 /* Handle VMUL. */
12543 else if (opc1 == 0x02 && !(opc3 & 0x01))
12544 {
12545 if (bit (arm_insn_r->arm_insn, 10))
12546 {
12547 if (bit (arm_insn_r->arm_insn, 6))
12548 curr_insn_type = INSN_T0;
12549 else
12550 curr_insn_type = INSN_T1;
12551 }
12552 else
12553 {
12554 if (dp_op_sz)
12555 curr_insn_type = INSN_T1;
12556 else
12557 curr_insn_type = INSN_T2;
12558 }
12559 }
12560 /* Handle VADD, VSUB. */
12561 else if (opc1 == 0x03)
12562 {
12563 if (!bit (arm_insn_r->arm_insn, 9))
12564 {
12565 if (bit (arm_insn_r->arm_insn, 6))
12566 curr_insn_type = INSN_T0;
12567 else
12568 curr_insn_type = INSN_T1;
12569 }
12570 else
12571 {
12572 if (dp_op_sz)
12573 curr_insn_type = INSN_T1;
12574 else
12575 curr_insn_type = INSN_T2;
12576 }
12577 }
12578 /* Handle VDIV. */
12579 else if (opc1 == 0x08)
12580 {
12581 if (dp_op_sz)
12582 curr_insn_type = INSN_T1;
12583 else
12584 curr_insn_type = INSN_T2;
12585 }
12586 /* Handle all other vfp data processing instructions. */
12587 else if (opc1 == 0x0b)
12588 {
12589 /* Handle VMOV. */
12590 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12591 {
12592 if (bit (arm_insn_r->arm_insn, 4))
12593 {
12594 if (bit (arm_insn_r->arm_insn, 6))
12595 curr_insn_type = INSN_T0;
12596 else
12597 curr_insn_type = INSN_T1;
12598 }
12599 else
12600 {
12601 if (dp_op_sz)
12602 curr_insn_type = INSN_T1;
12603 else
12604 curr_insn_type = INSN_T2;
12605 }
12606 }
12607 /* Handle VNEG and VABS. */
12608 else if ((opc2 == 0x01 && opc3 == 0x01)
12609 || (opc2 == 0x00 && opc3 == 0x03))
12610 {
12611 if (!bit (arm_insn_r->arm_insn, 11))
12612 {
12613 if (bit (arm_insn_r->arm_insn, 6))
12614 curr_insn_type = INSN_T0;
12615 else
12616 curr_insn_type = INSN_T1;
12617 }
12618 else
12619 {
12620 if (dp_op_sz)
12621 curr_insn_type = INSN_T1;
12622 else
12623 curr_insn_type = INSN_T2;
12624 }
12625 }
12626 /* Handle VSQRT. */
12627 else if (opc2 == 0x01 && opc3 == 0x03)
12628 {
12629 if (dp_op_sz)
12630 curr_insn_type = INSN_T1;
12631 else
12632 curr_insn_type = INSN_T2;
12633 }
12634 /* Handle VCVT. */
12635 else if (opc2 == 0x07 && opc3 == 0x03)
12636 {
12637 if (!dp_op_sz)
12638 curr_insn_type = INSN_T1;
12639 else
12640 curr_insn_type = INSN_T2;
12641 }
12642 else if (opc3 & 0x01)
12643 {
12644 /* Handle VCVT. */
12645 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12646 {
12647 if (!bit (arm_insn_r->arm_insn, 18))
12648 curr_insn_type = INSN_T2;
12649 else
12650 {
12651 if (dp_op_sz)
12652 curr_insn_type = INSN_T1;
12653 else
12654 curr_insn_type = INSN_T2;
12655 }
12656 }
12657 /* Handle VCVT. */
12658 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12659 {
12660 if (dp_op_sz)
12661 curr_insn_type = INSN_T1;
12662 else
12663 curr_insn_type = INSN_T2;
12664 }
12665 /* Handle VCVTB, VCVTT. */
12666 else if ((opc2 & 0x0e) == 0x02)
12667 curr_insn_type = INSN_T2;
12668 /* Handle VCMP, VCMPE. */
12669 else if ((opc2 & 0x0e) == 0x04)
12670 curr_insn_type = INSN_T3;
12671 }
12672 }
12673
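  /* INSN_T0 records a pair of D registers (an Advanced SIMD Q register
     destination), INSN_T1 a double-precision D register, INSN_T2 a
     single-precision destination, and INSN_T3 only FPSCR (VCMP/VCMPE).  */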
12674 switch (curr_insn_type)
12675 {
12676 case INSN_T0:
12677 reg_vd = reg_vd | (bit_d << 4);
12678 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12679 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12680 arm_insn_r->reg_rec_count = 2;
12681 break;
12682
12683 case INSN_T1:
12684 reg_vd = reg_vd | (bit_d << 4);
12685 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12686 arm_insn_r->reg_rec_count = 1;
12687 break;
12688
12689 case INSN_T2:
12690 reg_vd = (reg_vd << 1) | bit_d;
12691 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12692 arm_insn_r->reg_rec_count = 1;
12693 break;
12694
12695 case INSN_T3:
12696 record_buf[0] = ARM_FPSCR_REGNUM;
12697 arm_insn_r->reg_rec_count = 1;
12698 break;
12699
12700 default:
12701 gdb_assert_not_reached ("no decoding pattern found");
12702 break;
12703 }
12704
12705 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12706 return 0;
12707 }
12708
12709 /* Handling opcode 110 insns. */
12710
12711 static int
12712 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
12713 {
12714 uint32_t op1, op1_ebit, coproc;
12715
12716 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12717 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12718 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12719
12720 if ((coproc & 0x0e) == 0x0a)
12721 {
12722 /* Handle extension register ld/st instructions. */
12723 if (!(op1 & 0x20))
12724 return arm_record_exreg_ld_st_insn (arm_insn_r);
12725
12726 /* 64-bit transfers between arm core and extension registers. */
12727 if ((op1 & 0x3e) == 0x04)
12728 return arm_record_exreg_ld_st_insn (arm_insn_r);
12729 }
12730 else
12731 {
12732 /* Handle coprocessor ld/st instructions. */
12733 if (!(op1 & 0x3a))
12734 {
12735 /* Store. */
12736 if (!op1_ebit)
12737 return arm_record_unsupported_insn (arm_insn_r);
12738 else
12739 /* Load. */
12740 return arm_record_unsupported_insn (arm_insn_r);
12741 }
12742
12743 /* Move to coprocessor from two arm core registers. */
12744 if (op1 == 0x4)
12745 return arm_record_unsupported_insn (arm_insn_r);
12746
12747 /* Move to two arm core registers from coprocessor. */
12748 if (op1 == 0x5)
12749 {
12750 uint32_t reg_t[2];
12751
12752 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12753 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12754 arm_insn_r->reg_rec_count = 2;
12755
12756 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12757 return 0;
12758 }
12759 }
12760 return arm_record_unsupported_insn (arm_insn_r);
12761 }
12762
12763 /* Handling opcode 111 insns. */
12764
12765 static int
12766 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
12767 {
12768 uint32_t op, op1_ebit, coproc, bits_24_25;
12769 arm_gdbarch_tdep *tdep
12770 = (arm_gdbarch_tdep *) gdbarch_tdep (arm_insn_r->gdbarch);
12771 struct regcache *reg_cache = arm_insn_r->regcache;
12772
12773 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12774 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12775 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12776 op = bit (arm_insn_r->arm_insn, 4);
12777 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
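  /* BITS_24_25: 0b11 = SVC; 0b10 = CDP/MCR/MRC or VFP data processing and
     register transfers; 0b0x = LDC/STC, MCRR/MRRC and extension register
     load/store.  */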
12778
12779 /* Handle arm SWI/SVC system call instructions. */
12780 if (bits_24_25 == 0x3)
12781 {
12782 if (tdep->arm_syscall_record != NULL)
12783 {
12784 ULONGEST svc_operand, svc_number;
12785
12786 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12787
12788 if (svc_operand) /* OABI. */
12789 svc_number = svc_operand - 0x900000;
12790 else /* EABI. */
12791 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12792
12793 return tdep->arm_syscall_record (reg_cache, svc_number);
12794 }
12795 else
12796 {
12797 gdb_printf (gdb_stderr, _("no syscall record support\n"));
12798 return -1;
12799 }
12800 }
12801 else if (bits_24_25 == 0x02)
12802 {
12803 if (op)
12804 {
12805 if ((coproc & 0x0e) == 0x0a)
12806 {
12807 /* 8, 16, and 32-bit transfer */
12808 return arm_record_vdata_transfer_insn (arm_insn_r);
12809 }
12810 else
12811 {
12812 if (op1_ebit)
12813 {
12814 /* MRC, MRC2 */
12815 uint32_t record_buf[1];
12816
12817 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12818 if (record_buf[0] == 15)
12819 record_buf[0] = ARM_PS_REGNUM;
12820
12821 arm_insn_r->reg_rec_count = 1;
12822 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12823 record_buf);
12824 return 0;
12825 }
12826 else
12827 {
12828 /* MCR, MCR2 */
12829 return -1;
12830 }
12831 }
12832 }
12833 else
12834 {
12835 if ((coproc & 0x0e) == 0x0a)
12836 {
12837 /* VFP data-processing instructions. */
12838 return arm_record_vfp_data_proc_insn (arm_insn_r);
12839 }
12840 else
12841 {
12842 /* CDP, CDP2 */
12843 return -1;
12844 }
12845 }
12846 }
12847 else
12848 {
12849 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
12850
12851 if (op1 == 5)
12852 {
12853 if ((coproc & 0x0e) != 0x0a)
12854 {
12855 /* MRRC, MRRC2 */
12856 return -1;
12857 }
12858 }
12859 else if (op1 == 4 || op1 == 5)
12860 {
12861 if ((coproc & 0x0e) == 0x0a)
12862 {
12863 /* 64-bit transfers between ARM core and extension */
12864 return -1;
12865 }
12866 else if (op1 == 4)
12867 {
12868 /* MCRR, MCRR2 */
12869 return -1;
12870 }
12871 }
12872 else if (op1 == 0 || op1 == 1)
12873 {
12874 /* UNDEFINED */
12875 return -1;
12876 }
12877 else
12878 {
12879 if ((coproc & 0x0e) == 0x0a)
12880 {
12881 /* Extension register load/store */
12882 }
12883 else
12884 {
12885 /* STC, STC2, LDC, LDC2 */
12886 }
12887 return -1;
12888 }
12889 }
12890
12891 return -1;
12892 }
12893
12894 /* Handling opcode 000 insns. */
12895
12896 static int
12897 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
12898 {
12899 uint32_t record_buf[8];
12900 uint32_t reg_src1 = 0;
12901
12902 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12903
12904 record_buf[0] = ARM_PS_REGNUM;
12905 record_buf[1] = reg_src1;
12906 thumb_insn_r->reg_rec_count = 2;
12907
12908 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12909
12910 return 0;
12911 }
12912
12913
12914 /* Handling opcode 001 insns. */
12915
12916 static int
12917 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
12918 {
12919 uint32_t record_buf[8];
12920 uint32_t reg_src1 = 0;
12921
12922 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12923
12924 record_buf[0] = ARM_PS_REGNUM;
12925 record_buf[1] = reg_src1;
12926 thumb_insn_r->reg_rec_count = 2;
12927
12928 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12929
12930 return 0;
12931 }
12932
12933 /* Handling opcode 010 insns. */
12934
12935 static int
12936 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
12937 {
12938 struct regcache *reg_cache = thumb_insn_r->regcache;
12939 uint32_t record_buf[8], record_buf_mem[8];
12940
12941 uint32_t reg_src1 = 0, reg_src2 = 0;
12942 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12943
12944 ULONGEST u_regval[2] = {0};
12945
12946 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12947
12948 if (bit (thumb_insn_r->arm_insn, 12))
12949 {
12950 /* Handle load/store register offset. */
12951 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
12952
12953 if (in_inclusive_range (opB, 4U, 7U))
12954 {
12955 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
12956 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12957 record_buf[0] = reg_src1;
12958 thumb_insn_r->reg_rec_count = 1;
12959 }
12960 else if (in_inclusive_range (opB, 0U, 2U))
12961 {
12962 /* STR(2), STRB(2), STRH(2). */
12963 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12964 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12965 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12966 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12967 if (0 == opB)
12968 record_buf_mem[0] = 4; /* STR (2). */
12969 else if (2 == opB)
12970 record_buf_mem[0] = 1; /* STRB (2). */
12971 else if (1 == opB)
12972 record_buf_mem[0] = 2; /* STRH (2). */
12973 record_buf_mem[1] = u_regval[0] + u_regval[1];
12974 thumb_insn_r->mem_rec_count = 1;
12975 }
12976 }
12977 else if (bit (thumb_insn_r->arm_insn, 11))
12978 {
12979 /* Handle load from literal pool. */
12980 /* LDR(3). */
12981 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12982 record_buf[0] = reg_src1;
12983 thumb_insn_r->reg_rec_count = 1;
12984 }
12985 else if (opcode1)
12986 {
12987 /* Special data instructions and branch and exchange */
12988 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12989 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12990 if ((3 == opcode2) && (!opcode3))
12991 {
12992 /* Branch with exchange. */
12993 record_buf[0] = ARM_PS_REGNUM;
12994 thumb_insn_r->reg_rec_count = 1;
12995 }
12996 else
12997 {
12998 /* Format 8; special data processing insns. */
12999 record_buf[0] = ARM_PS_REGNUM;
13000 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13001 | bits (thumb_insn_r->arm_insn, 0, 2));
13002 thumb_insn_r->reg_rec_count = 2;
13003 }
13004 }
13005 else
13006 {
13007 /* Format 5; data processing insns. */
13008 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13009 if (bit (thumb_insn_r->arm_insn, 7))
13010 {
13011 reg_src1 = reg_src1 + 8;
13012 }
13013 record_buf[0] = ARM_PS_REGNUM;
13014 record_buf[1] = reg_src1;
13015 thumb_insn_r->reg_rec_count = 2;
13016 }
13017
13018 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13019 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13020 record_buf_mem);
13021
13022 return 0;
13023 }
13024
13025 /* Handling opcode 011 insns. */
13026
13027 static int
13028 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13029 {
13030 struct regcache *reg_cache = thumb_insn_r->regcache;
13031 uint32_t record_buf[8], record_buf_mem[8];
13032
13033 uint32_t reg_src1 = 0;
13034 uint32_t opcode = 0, immed_5 = 0;
13035
13036 ULONGEST u_regval = 0;
13037
13038 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13039
13040 if (opcode)
13041 {
13042 /* LDR(1). */
13043 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13044 record_buf[0] = reg_src1;
13045 thumb_insn_r->reg_rec_count = 1;
13046 }
13047 else
13048 {
13049 /* STR(1). */
13050 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13051 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13052 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13053 record_buf_mem[0] = 4;
13054 record_buf_mem[1] = u_regval + (immed_5 * 4);
13055 thumb_insn_r->mem_rec_count = 1;
13056 }
13057
13058 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13059 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13060 record_buf_mem);
13061
13062 return 0;
13063 }
13064
13065 /* Handling opcode 100 insns. */
13066
13067 static int
13068 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13069 {
13070 struct regcache *reg_cache = thumb_insn_r->regcache;
13071 uint32_t record_buf[8], record_buf_mem[8];
13072
13073 uint32_t reg_src1 = 0;
13074 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13075
13076 ULONGEST u_regval = 0;
13077
13078 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13079
13080 if (3 == opcode)
13081 {
13082 /* LDR(4). */
13083 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13084 record_buf[0] = reg_src1;
13085 thumb_insn_r->reg_rec_count = 1;
13086 }
13087 else if (1 == opcode)
13088 {
13089 /* LDRH(1). */
13090 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13091 record_buf[0] = reg_src1;
13092 thumb_insn_r->reg_rec_count = 1;
13093 }
13094 else if (2 == opcode)
13095 {
13096 /* STR(3). */
13097 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13098 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13099 record_buf_mem[0] = 4;
13100 record_buf_mem[1] = u_regval + (immed_8 * 4);
13101 thumb_insn_r->mem_rec_count = 1;
13102 }
13103 else if (0 == opcode)
13104 {
13105 /* STRH(1). */
13106 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13107 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13108 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13109 record_buf_mem[0] = 2;
13110 record_buf_mem[1] = u_regval + (immed_5 * 2);
13111 thumb_insn_r->mem_rec_count = 1;
13112 }
13113
13114 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13115 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13116 record_buf_mem);
13117
13118 return 0;
13119 }
13120
13121 /* Handling opcode 101 insns. */
13122
13123 static int
13124 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13125 {
13126 struct regcache *reg_cache = thumb_insn_r->regcache;
13127
13128 uint32_t opcode = 0;
13129 uint32_t register_bits = 0, register_count = 0;
13130 uint32_t index = 0, start_address = 0;
13131 uint32_t record_buf[24], record_buf_mem[48];
13132 uint32_t reg_src1;
13133
13134 ULONGEST u_regval = 0;
13135
13136 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13137
13138 if (opcode == 0 || opcode == 1)
13139 {
13140 /* ADR and ADD (SP plus immediate) */
13141
13142 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13143 record_buf[0] = reg_src1;
13144 thumb_insn_r->reg_rec_count = 1;
13145 }
13146 else
13147 {
13148 /* Miscellaneous 16-bit instructions */
13149 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13150
13151 switch (opcode2)
13152 {
13153 case 6:
13154 /* SETEND and CPS */
13155 break;
13156 case 0:
13157 /* ADD/SUB (SP plus immediate) */
13158 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13159 record_buf[0] = ARM_SP_REGNUM;
13160 thumb_insn_r->reg_rec_count = 1;
13161 break;
13162 case 1: /* fall through */
13163 case 3: /* fall through */
13164 case 9: /* fall through */
13165 case 11:
13166 /* CBNZ, CBZ */
13167 break;
13168 case 2:
13169 /* SXTH, SXTB, UXTH, UXTB */
13170 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13171 thumb_insn_r->reg_rec_count = 1;
13172 break;
13173 case 4: /* fall through */
13174 case 5:
13175 /* PUSH. */
13176 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13177 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13178 while (register_bits)
13179 {
13180 if (register_bits & 0x00000001)
13181 register_count++;
13182 register_bits = register_bits >> 1;
13183 }
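	  /* Bit 8 is the M bit: when set, LR is pushed in addition to the
	     registers in the list.  */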
13184 start_address = u_regval -
13185 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
13186 thumb_insn_r->mem_rec_count = register_count;
13187 while (register_count)
13188 {
13189 record_buf_mem[(register_count * 2) - 1] = start_address;
13190 record_buf_mem[(register_count * 2) - 2] = 4;
13191 start_address = start_address + 4;
13192 register_count--;
13193 }
13194 record_buf[0] = ARM_SP_REGNUM;
13195 thumb_insn_r->reg_rec_count = 1;
13196 break;
13197 case 10:
13198 /* REV, REV16, REVSH */
13199 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13200 thumb_insn_r->reg_rec_count = 1;
13201 break;
13202 case 12: /* fall through */
13203 case 13:
13204 /* POP. */
13205 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13206 while (register_bits)
13207 {
13208 if (register_bits & 0x00000001)
13209 record_buf[index++] = register_count;
13210 register_bits = register_bits >> 1;
13211 register_count++;
13212 }
13213 record_buf[index++] = ARM_PS_REGNUM;
13214 record_buf[index++] = ARM_SP_REGNUM;
13215 thumb_insn_r->reg_rec_count = index;
13216 break;
13217 case 0xe:
13218 /* BKPT insn. */
13219 /* Handle the enhanced software breakpoint instruction, BKPT. */
13220 /* The CPSR is changed so that execution continues in ARM state with
13221 normal interrupts disabled, entering Abort mode. */
13222 /* The PC is set according to the high-vector configuration. */
13223 /* If the user hits the breakpoint and then reverse-executes, we need to
13224 restore the previous CPSR and program counter. */
13225 record_buf[0] = ARM_PS_REGNUM;
13226 record_buf[1] = ARM_LR_REGNUM;
13227 thumb_insn_r->reg_rec_count = 2;
13228 /* We need to save SPSR value, which is not yet done. */
13229 gdb_printf (gdb_stderr,
13230 _("Process record does not support instruction "
13231 "0x%0x at address %s.\n"),
13232 thumb_insn_r->arm_insn,
13233 paddress (thumb_insn_r->gdbarch,
13234 thumb_insn_r->this_addr));
13235 return -1;
13236
13237 case 0xf:
13238 /* If-Then, and hints */
13239 break;
13240 default:
13241 return -1;
13242 };
13243 }
13244
13245 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13246 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13247 record_buf_mem);
13248
13249 return 0;
13250 }
13251
13252 /* Handling opcode 110 insns. */
13253
13254 static int
13255 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13256 {
13257 arm_gdbarch_tdep *tdep
13258 = (arm_gdbarch_tdep *) gdbarch_tdep (thumb_insn_r->gdbarch);
13259 struct regcache *reg_cache = thumb_insn_r->regcache;
13260
13261 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
13262 uint32_t reg_src1 = 0;
13263 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13264 uint32_t index = 0, start_address = 0;
13265 uint32_t record_buf[24], record_buf_mem[48];
13266
13267 ULONGEST u_regval = 0;
13268
13269 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13270 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13271
13272 if (1 == opcode2)
13273 {
13274
13275 /* LDMIA. */
13276 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13277 /* Get Rn. */
13278 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13279 while (register_bits)
13280 {
13281 if (register_bits & 0x00000001)
13282 record_buf[index++] = register_count;
13283 register_bits = register_bits >> 1;
13284 register_count++;
13285 }
13286 record_buf[index++] = reg_src1;
13287 thumb_insn_r->reg_rec_count = index;
13288 }
13289 else if (0 == opcode2)
13290 {
13291 /* Handle STMIA. */
13292 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13293 /* Get Rn. */
13294 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13295 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13296 while (register_bits)
13297 {
13298 if (register_bits & 0x00000001)
13299 register_count++;
13300 register_bits = register_bits >> 1;
13301 }
13302 start_address = u_regval;
13303 thumb_insn_r->mem_rec_count = register_count;
13304 while (register_count)
13305 {
13306 record_buf_mem[(register_count * 2) - 1] = start_address;
13307 record_buf_mem[(register_count * 2) - 2] = 4;
13308 start_address = start_address + 4;
13309 register_count--;
13310 }
13311 }
13312 else if (0x1F == opcode1)
13313 {
13314 /* Handle arm syscall insn. */
13315 if (tdep->arm_syscall_record != NULL)
13316 {
13317 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13318 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13319 }
13320 else
13321 {
13322 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13323 return -1;
13324 }
13325 }
13326
13327 /* B (1), the conditional branch, is automatically taken care of in
13328 process_record, as the PC is saved there. */
13329
13330 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13331 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13332 record_buf_mem);
13333
13334 return ret;
13335 }
13336
13337 /* Handling opcode 111 insns. */
13338
13339 static int
13340 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13341 {
13342 uint32_t record_buf[8];
13343 uint32_t bits_h = 0;
13344
13345 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13346
13347 if (2 == bits_h || 3 == bits_h)
13348 {
13349 /* BL */
13350 record_buf[0] = ARM_LR_REGNUM;
13351 thumb_insn_r->reg_rec_count = 1;
13352 }
13353 else if (1 == bits_h)
13354 {
13355 /* BLX(1). */
13356 record_buf[0] = ARM_PS_REGNUM;
13357 record_buf[1] = ARM_LR_REGNUM;
13358 thumb_insn_r->reg_rec_count = 2;
13359 }
13360
13361 /* B(2) is automatically taken care of in process_record, as the PC is
13362 saved there. */
13363
13364 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13365
13366 return 0;
13367 }
13368
13369 /* Handler for thumb2 load/store multiple instructions. */
13370
13371 static int
13372 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13373 {
13374 struct regcache *reg_cache = thumb2_insn_r->regcache;
13375
13376 uint32_t reg_rn, op;
13377 uint32_t register_bits = 0, register_count = 0;
13378 uint32_t index = 0, start_address = 0;
13379 uint32_t record_buf[24], record_buf_mem[48];
13380
13381 ULONGEST u_regval = 0;
13382
13383 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13384 op = bits (thumb2_insn_r->arm_insn, 23, 24);
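  /* OP (bits 23-24): 0b00 and 0b11 are SRS/RFE; 0b01 is increment after;
     0b10 is decrement before.  */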
13385
13386 if (0 == op || 3 == op)
13387 {
13388 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13389 {
13390 /* Handle RFE instruction. */
13391 record_buf[0] = ARM_PS_REGNUM;
13392 thumb2_insn_r->reg_rec_count = 1;
13393 }
13394 else
13395 {
13396 /* Handle SRS instruction after reading banked SP. */
13397 return arm_record_unsupported_insn (thumb2_insn_r);
13398 }
13399 }
13400 else if (1 == op || 2 == op)
13401 {
13402 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13403 {
13404 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13405 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13406 while (register_bits)
13407 {
13408 if (register_bits & 0x00000001)
13409 record_buf[index++] = register_count;
13410
13411 register_count++;
13412 register_bits = register_bits >> 1;
13413 }
13414 record_buf[index++] = reg_rn;
13415 record_buf[index++] = ARM_PS_REGNUM;
13416 thumb2_insn_r->reg_rec_count = index;
13417 }
13418 else
13419 {
13420 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13421 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13422 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13423 while (register_bits)
13424 {
13425 if (register_bits & 0x00000001)
13426 register_count++;
13427
13428 register_bits = register_bits >> 1;
13429 }
13430
13431 if (1 == op)
13432 {
13433 /* Start address calculation for STM/STMIA/STMEA. */
13434 start_address = u_regval;
13435 }
13436 else if (2 == op)
13437 {
13438 /* Start address calculation for STMDB/STMFD. */
13439 start_address = u_regval - register_count * 4;
13440 }
13441
13442 thumb2_insn_r->mem_rec_count = register_count;
13443 while (register_count)
13444 {
13445 record_buf_mem[register_count * 2 - 1] = start_address;
13446 record_buf_mem[register_count * 2 - 2] = 4;
13447 start_address = start_address + 4;
13448 register_count--;
13449 }
13450 record_buf[0] = reg_rn;
13451 record_buf[1] = ARM_PS_REGNUM;
13452 thumb2_insn_r->reg_rec_count = 2;
13453 }
13454 }
13455
13456 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13457 record_buf_mem);
13458 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13459 record_buf);
13460 return ARM_RECORD_SUCCESS;
13461 }
13462
13463 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13464 instructions. */
13465
13466 static int
13467 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13468 {
13469 struct regcache *reg_cache = thumb2_insn_r->regcache;
13470
13471 uint32_t reg_rd, reg_rn, offset_imm;
13472 uint32_t reg_dest1, reg_dest2;
13473 uint32_t address, offset_addr;
13474 uint32_t record_buf[8], record_buf_mem[8];
13475 uint32_t op1, op2, op3;
13476
13477 ULONGEST u_regval[2];
13478
13479 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13480 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13481 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
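  /* OP1, OP2 and OP3 select between the load/store exclusive forms
     (including the byte, halfword and doubleword variants), load/store
     dual, and the table branch instructions TBB/TBH.  */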
13482
13483 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13484 {
13485 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13486 {
13487 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13488 record_buf[0] = reg_dest1;
13489 record_buf[1] = ARM_PS_REGNUM;
13490 thumb2_insn_r->reg_rec_count = 2;
13491 }
13492
13493 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13494 {
13495 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13496 record_buf[2] = reg_dest2;
13497 thumb2_insn_r->reg_rec_count = 3;
13498 }
13499 }
13500 else
13501 {
13502 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13503 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13504
13505 if (0 == op1 && 0 == op2)
13506 {
13507 /* Handle STREX. */
13508 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13509 address = u_regval[0] + (offset_imm * 4);
13510 record_buf_mem[0] = 4;
13511 record_buf_mem[1] = address;
13512 thumb2_insn_r->mem_rec_count = 1;
13513 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13514 record_buf[0] = reg_rd;
13515 thumb2_insn_r->reg_rec_count = 1;
13516 }
13517 else if (1 == op1 && 0 == op2)
13518 {
13519 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13520 record_buf[0] = reg_rd;
13521 thumb2_insn_r->reg_rec_count = 1;
13522 address = u_regval[0];
13523 record_buf_mem[1] = address;
13524
13525 if (4 == op3)
13526 {
13527 /* Handle STREXB. */
13528 record_buf_mem[0] = 1;
13529 thumb2_insn_r->mem_rec_count = 1;
13530 }
13531 else if (5 == op3)
13532 {
13533 /* Handle STREXH. */
13534 record_buf_mem[0] = 2;
13535 thumb2_insn_r->mem_rec_count = 1;
13536 }
13537 else if (7 == op3)
13538 {
13539 /* Handle STREXD. */
13540 address = u_regval[0];
13541 record_buf_mem[0] = 4;
13542 record_buf_mem[2] = 4;
13543 record_buf_mem[3] = address + 4;
13544 thumb2_insn_r->mem_rec_count = 2;
13545 }
13546 }
13547 else
13548 {
13549 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13550
13551 if (bit (thumb2_insn_r->arm_insn, 24))
13552 {
13553 if (bit (thumb2_insn_r->arm_insn, 23))
13554 offset_addr = u_regval[0] + (offset_imm * 4);
13555 else
13556 offset_addr = u_regval[0] - (offset_imm * 4);
13557
13558 address = offset_addr;
13559 }
13560 else
13561 address = u_regval[0];
13562
13563 record_buf_mem[0] = 4;
13564 record_buf_mem[1] = address;
13565 record_buf_mem[2] = 4;
13566 record_buf_mem[3] = address + 4;
13567 thumb2_insn_r->mem_rec_count = 2;
13568 record_buf[0] = reg_rn;
13569 thumb2_insn_r->reg_rec_count = 1;
13570 }
13571 }
13572
13573 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13574 record_buf);
13575 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13576 record_buf_mem);
13577 return ARM_RECORD_SUCCESS;
13578 }
13579
13580 /* Handler for thumb2 data processing (shifted register and modified immediate)
13581 instructions. */
13582
13583 static int
13584 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13585 {
13586 uint32_t reg_rd, op;
13587 uint32_t record_buf[8];
13588
13589 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13590 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13591
13592 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13593 {
13594 record_buf[0] = ARM_PS_REGNUM;
13595 thumb2_insn_r->reg_rec_count = 1;
13596 }
13597 else
13598 {
13599 record_buf[0] = reg_rd;
13600 record_buf[1] = ARM_PS_REGNUM;
13601 thumb2_insn_r->reg_rec_count = 2;
13602 }
13603
13604 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13605 record_buf);
13606 return ARM_RECORD_SUCCESS;
13607 }
13608
13609 /* Generic handler for thumb2 instructions which affect the destination and
13610 PS registers. */
13611
13612 static int
13613 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13614 {
13615 uint32_t reg_rd;
13616 uint32_t record_buf[8];
13617
13618 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13619
13620 record_buf[0] = reg_rd;
13621 record_buf[1] = ARM_PS_REGNUM;
13622 thumb2_insn_r->reg_rec_count = 2;
13623
13624 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13625 record_buf);
13626 return ARM_RECORD_SUCCESS;
13627 }
13628
13629 /* Handler for thumb2 branch and miscellaneous control instructions. */
13630
13631 static int
13632 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
13633 {
13634 uint32_t op, op1, op2;
13635 uint32_t record_buf[8];
13636
13637 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13638 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13639 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13640
13641 /* Handle MSR insn. */
13642 if (!(op1 & 0x2) && 0x38 == op)
13643 {
13644 if (!(op2 & 0x3))
13645 {
13646 /* CPSR is going to be changed. */
13647 record_buf[0] = ARM_PS_REGNUM;
13648 thumb2_insn_r->reg_rec_count = 1;
13649 }
13650 else
13651 {
13652 arm_record_unsupported_insn (thumb2_insn_r);
13653 return -1;
13654 }
13655 }
13656 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13657 {
13658 /* BLX. */
13659 record_buf[0] = ARM_PS_REGNUM;
13660 record_buf[1] = ARM_LR_REGNUM;
13661 thumb2_insn_r->reg_rec_count = 2;
13662 }
13663
13664 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13665 record_buf);
13666 return ARM_RECORD_SUCCESS;
13667 }
13668
13669 /* Handler for thumb2 store single data item instructions. */
13670
13671 static int
13672 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
13673 {
13674 struct regcache *reg_cache = thumb2_insn_r->regcache;
13675
13676 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13677 uint32_t address, offset_addr;
13678 uint32_t record_buf[8], record_buf_mem[8];
13679 uint32_t op1, op2;
13680
13681 ULONGEST u_regval[2];
13682
13683 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13684 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13685 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13686 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13687
13688 if (bit (thumb2_insn_r->arm_insn, 23))
13689 {
13690 /* T2 encoding. */
13691 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13692 offset_addr = u_regval[0] + offset_imm;
13693 address = offset_addr;
13694 }
13695 else
13696 {
13697 /* T3 encoding. */
13698 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13699 {
13700 /* Handle STRB, STRH and STR (register). */
13701 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13702 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13703 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13704 offset_addr = u_regval[1] << shift_imm;
13705 address = u_regval[0] + offset_addr;
13706 }
13707 else
13708 {
13709 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13710 if (bit (thumb2_insn_r->arm_insn, 10))
13711 {
13712 if (bit (thumb2_insn_r->arm_insn, 9))
13713 offset_addr = u_regval[0] + offset_imm;
13714 else
13715 offset_addr = u_regval[0] - offset_imm;
13716
13717 address = offset_addr;
13718 }
13719 else
13720 address = u_regval[0];
13721 }
13722 }
13723
13724 switch (op1)
13725 {
13726 /* Store byte instructions. */
13727 case 4:
13728 case 0:
13729 record_buf_mem[0] = 1;
13730 break;
13731 /* Store half word instructions. */
13732 case 1:
13733 case 5:
13734 record_buf_mem[0] = 2;
13735 break;
13736 /* Store word instructions. */
13737 case 2:
13738 case 6:
13739 record_buf_mem[0] = 4;
13740 break;
13741
13742 default:
13743 gdb_assert_not_reached ("no decoding pattern found");
13744 break;
13745 }
13746
13747 record_buf_mem[1] = address;
13748 thumb2_insn_r->mem_rec_count = 1;
13749 record_buf[0] = reg_rn;
13750 thumb2_insn_r->reg_rec_count = 1;
13751
13752 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13753 record_buf);
13754 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13755 record_buf_mem);
13756 return ARM_RECORD_SUCCESS;
13757 }
13758
13759 /* Handler for thumb2 load memory hints instructions. */
13760
13761 static int
13762 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
13763 {
13764 uint32_t record_buf[8];
13765 uint32_t reg_rt, reg_rn;
13766
13767 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13768 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13769
13770 if (ARM_PC_REGNUM != reg_rt)
13771 {
13772 record_buf[0] = reg_rt;
13773 record_buf[1] = reg_rn;
13774 record_buf[2] = ARM_PS_REGNUM;
13775 thumb2_insn_r->reg_rec_count = 3;
13776
13777 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13778 record_buf);
13779 return ARM_RECORD_SUCCESS;
13780 }
13781
13782 return ARM_RECORD_FAILURE;
13783 }
13784
13785 /* Handler for thumb2 load word instructions. */
13786
13787 static int
13788 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
13789 {
13790 uint32_t record_buf[8];
13791
13792 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13793 record_buf[1] = ARM_PS_REGNUM;
13794 thumb2_insn_r->reg_rec_count = 2;
13795
13796 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13797 record_buf);
13798 return ARM_RECORD_SUCCESS;
13799 }
13800
13801 /* Handler for thumb2 long multiply, long multiply accumulate, and
13802 divide instructions. */
13803
13804 static int
13805 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
13806 {
13807 uint32_t opcode1 = 0, opcode2 = 0;
13808 uint32_t record_buf[8];
13809
13810 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13811 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13812
13813 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13814 {
13815 /* Handle SMULL, UMULL, SMLAL and UMLAL, which write a 64-bit result
13816 to RdLo:RdHi. */
13817 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13818 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13819 record_buf[2] = ARM_PS_REGNUM;
13820 thumb2_insn_r->reg_rec_count = 3;
13821 }
13822 else if (1 == opcode1 || 3 == opcode1)
13823 {
13824 /* Handle SDIV and UDIV. */
13825 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13826 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13827 record_buf[2] = ARM_PS_REGNUM;
13828 thumb2_insn_r->reg_rec_count = 3;
13829 }
13830 else
13831 return ARM_RECORD_FAILURE;
13832
13833 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13834 record_buf);
13835 return ARM_RECORD_SUCCESS;
13836 }
13837
13838 /* Record handler for thumb32 coprocessor instructions. */
13839
13840 static int
13841 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
13842 {
13843 if (bit (thumb2_insn_r->arm_insn, 25))
13844 return arm_record_coproc_data_proc (thumb2_insn_r);
13845 else
13846 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13847 }
13848
13849 /* Record handler for Advanced SIMD structure load/store instructions. */
13850
13851 static int
13852 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
13853 {
13854 struct regcache *reg_cache = thumb2_insn_r->regcache;
13855 uint32_t l_bit, a_bit, b_bits;
13856 uint32_t record_buf[128], record_buf_mem[128];
13857 uint32_t reg_rn, reg_vd, address, f_elem;
13858 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13859 uint8_t f_ebytes;
13860
13861 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13862 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13863 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13864 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13865 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13866 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13867 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13868 f_elem = 8 / f_ebytes;
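  /* F_EBYTES is the element size in bytes, F_ELEM the number of elements
     held in one D register.  */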
13869
13870 if (!l_bit)
13871 {
13872 ULONGEST u_regval = 0;
13873 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13874 address = u_regval;
13875
13876 if (!a_bit)
13877 {
13878 /* Handle VST1. */
13879 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13880 {
13881 if (b_bits == 0x07)
13882 bf_regs = 1;
13883 else if (b_bits == 0x0a)
13884 bf_regs = 2;
13885 else if (b_bits == 0x06)
13886 bf_regs = 3;
13887 else if (b_bits == 0x02)
13888 bf_regs = 4;
13889 else
13890 bf_regs = 0;
13891
13892 for (index_r = 0; index_r < bf_regs; index_r++)
13893 {
13894 for (index_e = 0; index_e < f_elem; index_e++)
13895 {
13896 record_buf_mem[index_m++] = f_ebytes;
13897 record_buf_mem[index_m++] = address;
13898 address = address + f_ebytes;
13899 thumb2_insn_r->mem_rec_count += 1;
13900 }
13901 }
13902 }
13903 /* Handle VST2. */
13904 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13905 {
13906 if (b_bits == 0x09 || b_bits == 0x08)
13907 bf_regs = 1;
13908 else if (b_bits == 0x03)
13909 bf_regs = 2;
13910 else
13911 bf_regs = 0;
13912
13913 for (index_r = 0; index_r < bf_regs; index_r++)
13914 for (index_e = 0; index_e < f_elem; index_e++)
13915 {
13916 for (loop_t = 0; loop_t < 2; loop_t++)
13917 {
13918 record_buf_mem[index_m++] = f_ebytes;
13919 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13920 thumb2_insn_r->mem_rec_count += 1;
13921 }
13922 address = address + (2 * f_ebytes);
13923 }
13924 }
13925 /* Handle VST3. */
13926 else if ((b_bits & 0x0e) == 0x04)
13927 {
13928 for (index_e = 0; index_e < f_elem; index_e++)
13929 {
13930 for (loop_t = 0; loop_t < 3; loop_t++)
13931 {
13932 record_buf_mem[index_m++] = f_ebytes;
13933 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13934 thumb2_insn_r->mem_rec_count += 1;
13935 }
13936 address = address + (3 * f_ebytes);
13937 }
13938 }
13939 /* Handle VST4. */
13940 else if (!(b_bits & 0x0e))
13941 {
13942 for (index_e = 0; index_e < f_elem; index_e++)
13943 {
13944 for (loop_t = 0; loop_t < 4; loop_t++)
13945 {
13946 record_buf_mem[index_m++] = f_ebytes;
13947 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13948 thumb2_insn_r->mem_rec_count += 1;
13949 }
13950 address = address + (4 * f_ebytes);
13951 }
13952 }
13953 }
13954 else
13955 {
13956 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13957
13958 if (bft_size == 0x00)
13959 f_ebytes = 1;
13960 else if (bft_size == 0x01)
13961 f_ebytes = 2;
13962 else if (bft_size == 0x02)
13963 f_ebytes = 4;
13964 else
13965 f_ebytes = 0;
13966
13967 /* Handle VST1. */
13968 if (!(b_bits & 0x0b) || b_bits == 0x08)
13969 thumb2_insn_r->mem_rec_count = 1;
13970 /* Handle VST2. */
13971 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13972 thumb2_insn_r->mem_rec_count = 2;
13973 /* Handle VST3. */
13974 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13975 thumb2_insn_r->mem_rec_count = 3;
13976 /* Handle VST4. */
13977 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13978 thumb2_insn_r->mem_rec_count = 4;
13979
13980 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13981 {
13982 record_buf_mem[index_m * 2] = f_ebytes;
13983 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
13984 }
13985 }
13986 }
13987 else
13988 {
13989 if (!a_bit)
13990 {
13991 /* Handle VLD1. */
13992 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13993 thumb2_insn_r->reg_rec_count = 1;
13994 /* Handle VLD2. */
13995 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13996 thumb2_insn_r->reg_rec_count = 2;
13997 /* Handle VLD3. */
13998 else if ((b_bits & 0x0e) == 0x04)
13999 thumb2_insn_r->reg_rec_count = 3;
14000 /* Handle VLD4. */
14001 else if (!(b_bits & 0x0e))
14002 thumb2_insn_r->reg_rec_count = 4;
14003 }
14004 else
14005 {
14006 /* Handle VLD1. */
14007 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14008 thumb2_insn_r->reg_rec_count = 1;
14009 /* Handle VLD2. */
14010 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14011 thumb2_insn_r->reg_rec_count = 2;
14012 /* Handle VLD3. */
14013 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14014 thumb2_insn_r->reg_rec_count = 3;
14015 /* Handle VLD4. */
14016 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14017 thumb2_insn_r->reg_rec_count = 4;
14018
14019 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14020 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14021 }
14022 }
14023
14024 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14025 {
14026 record_buf[index_r] = reg_rn;
14027 thumb2_insn_r->reg_rec_count += 1;
14028 }
14029
14030 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14031 record_buf);
14032 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14033 record_buf_mem);
14034 return 0;
14035 }
14036
14037 /* Decodes thumb2 instruction type and invokes its record handler. */
14038
14039 static unsigned int
14040 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14041 {
14042 uint32_t op, op1, op2;
14043
14044 op = bit (thumb2_insn_r->arm_insn, 15);
14045 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14046 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
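  /* OP1 selects the major 32-bit Thumb encoding group; OP2 and OP refine
     the decode within that group.  */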
14047
14048 if (op1 == 0x01)
14049 {
14050 if (!(op2 & 0x64))
14051 {
14052 /* Load/store multiple instruction. */
14053 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14054 }
14055 else if ((op2 & 0x64) == 0x4)
14056 {
14057 /* Load/store (dual/exclusive) and table branch instruction. */
14058 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14059 }
14060 else if ((op2 & 0x60) == 0x20)
14061 {
14062 /* Data-processing (shifted register). */
14063 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14064 }
14065 else if (op2 & 0x40)
14066 {
14067 /* Co-processor instructions. */
14068 return thumb2_record_coproc_insn (thumb2_insn_r);
14069 }
14070 }
14071 else if (op1 == 0x02)
14072 {
14073 if (op)
14074 {
14075 /* Branches and miscellaneous control instructions. */
14076 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14077 }
14078 else if (op2 & 0x20)
14079 {
14080 /* Data-processing (plain binary immediate) instruction. */
14081 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14082 }
14083 else
14084 {
14085 /* Data-processing (modified immediate). */
14086 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14087 }
14088 }
14089 else if (op1 == 0x03)
14090 {
14091 if (!(op2 & 0x71))
14092 {
14093 /* Store single data item. */
14094 return thumb2_record_str_single_data (thumb2_insn_r);
14095 }
14096 else if (!((op2 & 0x71) ^ 0x10))
14097 {
14098 /* Advanced SIMD or structure load/store instructions. */
14099 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14100 }
14101 else if (!((op2 & 0x67) ^ 0x01))
14102 {
14103 /* Load byte, memory hints instruction. */
14104 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14105 }
14106 else if (!((op2 & 0x67) ^ 0x03))
14107 {
14108 /* Load halfword, memory hints instruction. */
14109 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14110 }
14111 else if (!((op2 & 0x67) ^ 0x05))
14112 {
14113 /* Load word instruction. */
14114 return thumb2_record_ld_word (thumb2_insn_r);
14115 }
14116 else if (!((op2 & 0x70) ^ 0x20))
14117 {
14118 /* Data-processing (register) instruction. */
14119 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14120 }
14121 else if (!((op2 & 0x78) ^ 0x30))
14122 {
14123 /* Multiply, multiply accumulate, abs diff instruction. */
14124 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14125 }
14126 else if (!((op2 & 0x78) ^ 0x38))
14127 {
14128 /* Long multiply, long multiply accumulate, and divide. */
14129 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14130 }
14131 else if (op2 & 0x40)
14132 {
14133 /* Co-processor instructions. */
14134 return thumb2_record_coproc_insn (thumb2_insn_r);
14135 }
14136 }
14137
14138 return -1;
14139 }
14140
14141 namespace {
14142 /* Abstract memory reader. */
14143
14144 class abstract_memory_reader
14145 {
14146 public:
14147 /* Read LEN bytes of target memory at address MEMADDR, placing the
14148 results in GDB's memory at BUF. Return true on success. */
14149
14150 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
14151 };
14152
14153 /* Instruction reader from real target. */
14154
14155 class instruction_reader : public abstract_memory_reader
14156 {
14157 public:
14158 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
14159 {
14160 if (target_read_memory (memaddr, buf, len))
14161 return false;
14162 else
14163 return true;
14164 }
14165 };
14166
14167 } // namespace
14168
14169 /* Extract an arm/thumb/thumb2 insn depending on its size. Return 0 on
14170 success and a positive value on failure. */
14171
14172 static int
14173 extract_arm_insn (abstract_memory_reader& reader,
14174 arm_insn_decode_record *insn_record, uint32_t insn_size)
14175 {
14176 gdb_byte buf[insn_size];
14177
14178 memset (&buf[0], 0, insn_size);
14179
14180 if (!reader.read (insn_record->this_addr, buf, insn_size))
14181 return 1;
14182 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
14183 insn_size,
14184 gdbarch_byte_order_for_code (insn_record->gdbarch));
14185 return 0;
14186 }
14187
14188 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14189
14190 /* Decode an arm/thumb insn depending on its condition codes and opcodes,
14191 and dispatch it. */
14192
14193 static int
14194 decode_insn (abstract_memory_reader &reader,
14195 arm_insn_decode_record *arm_record,
14196 record_type_t record_type, uint32_t insn_size)
14197 {
14198
14199 /* Bits 25, 26 and 27 (counting from 0) decode the type of arm
14200 instruction. */
14201 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14202 {
14203 arm_record_data_proc_misc_ld_str, /* 000. */
14204 arm_record_data_proc_imm, /* 001. */
14205 arm_record_ld_st_imm_offset, /* 010. */
14206 arm_record_ld_st_reg_offset, /* 011. */
14207 arm_record_ld_st_multiple, /* 100. */
14208 arm_record_b_bl, /* 101. */
14209 arm_record_asimd_vfp_coproc, /* 110. */
14210 arm_record_coproc_data_proc /* 111. */
14211 };
14212
14213 /* Bits 13, 14 and 15 (counting from 0) decode the type of thumb
14214 instruction. */
14215 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14216 {
14217 thumb_record_shift_add_sub, /* 000. */
14218 thumb_record_add_sub_cmp_mov, /* 001. */
14219 thumb_record_ld_st_reg_offset, /* 010. */
14220 thumb_record_ld_st_imm_offset, /* 011. */
14221 thumb_record_ld_st_stack, /* 100. */
14222 thumb_record_misc, /* 101. */
14223 thumb_record_ldm_stm_swi, /* 110. */
14224 thumb_record_branch /* 111. */
14225 };
14226
14227 uint32_t ret = 0; /* Return value: negative: failure; 0: success. */
14228 uint32_t insn_id = 0;
14229
14230 if (extract_arm_insn (reader, arm_record, insn_size))
14231 {
14232 if (record_debug)
14233 {
14234 gdb_printf (gdb_stdlog,
14235 _("Process record: error reading memory at "
14236 "addr %s len = %d.\n"),
14237 paddress (arm_record->gdbarch,
14238 arm_record->this_addr), insn_size);
14239 }
14240 return -1;
14241 }
14242 else if (ARM_RECORD == record_type)
14243 {
14244 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14245 insn_id = bits (arm_record->arm_insn, 25, 27);
14246
14247 if (arm_record->cond == 0xf)
14248 ret = arm_record_extension_space (arm_record);
14249 else
14250 {
14251 /* The insn did not fall into the extension space, so decode it
14252 through the normal opcode table. */
14253 ret = arm_handle_insn[insn_id] (arm_record);
14254 }
14255 if (ret != ARM_RECORD_SUCCESS)
14256 {
14257 arm_record_unsupported_insn (arm_record);
14258 ret = -1;
14259 }
14260 }
14261 else if (THUMB_RECORD == record_type)
14262 {
14263 /* Thumb does not have condition codes, so set the condition field to -1. */
14264 arm_record->cond = -1;
14265 insn_id = bits (arm_record->arm_insn, 13, 15);
14266 ret = thumb_handle_insn[insn_id] (arm_record);
14267 if (ret != ARM_RECORD_SUCCESS)
14268 {
14269 arm_record_unsupported_insn (arm_record);
14270 ret = -1;
14271 }
14272 }
14273 else if (THUMB2_RECORD == record_type)
14274 {
14275 /* Thumb does not have condition codes, so set the condition field to -1. */
14276 arm_record->cond = -1;
14277
14278 /* Swap the first half of the 32-bit thumb instruction with the second half. */
14279 arm_record->arm_insn
14280 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
14281
14282 ret = thumb2_record_decode_insn_handler (arm_record);
14283
14284 if (ret != ARM_RECORD_SUCCESS)
14285 {
14286 arm_record_unsupported_insn (arm_record);
14287 ret = -1;
14288 }
14289 }
14290 else
14291 {
14292 /* Throw assertion. */
14293 gdb_assert_not_reached ("not a valid instruction, could not decode");
14294 }
14295
14296 return ret;
14297 }
14298
14299 #if GDB_SELF_TEST
14300 namespace selftests {
14301
14302 /* Provide both 16-bit and 32-bit thumb instructions. */
14303
14304 class instruction_reader_thumb : public abstract_memory_reader
14305 {
14306 public:
14307 template<size_t SIZE>
14308 instruction_reader_thumb (enum bfd_endian endian,
14309 const uint16_t (&insns)[SIZE])
14310 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
14311 {}
14312
14313 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
14314 {
14315 SELF_CHECK (len == 4 || len == 2);
14316 SELF_CHECK (memaddr % 2 == 0);
14317 SELF_CHECK ((memaddr / 2) < m_insns_size);
14318
14319 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
14320 if (len == 4)
14321 {
14322 store_unsigned_integer (&buf[2], 2, m_endian,
14323 m_insns[memaddr / 2 + 1]);
14324 }
14325 return true;
14326 }
14327
14328 private:
14329 enum bfd_endian m_endian;
14330 const uint16_t *m_insns;
14331 size_t m_insns_size;
14332 };
14333
14334 static void
14335 arm_record_test (void)
14336 {
14337 struct gdbarch_info info;
14338 info.bfd_arch_info = bfd_scan_arch ("arm");
14339
14340 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14341
14342 SELF_CHECK (gdbarch != NULL);
14343
14344 /* 16-bit Thumb instructions. */
14345 {
14346 arm_insn_decode_record arm_record;
14347
14348 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14349 arm_record.gdbarch = gdbarch;
14350
14351 static const uint16_t insns[] = {
14352 /* db b2 uxtb r3, r3 */
14353 0xb2db,
14354 /* cd 58 ldr r5, [r1, r3] */
14355 0x58cd,
14356 };
14357
14358 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
14359 instruction_reader_thumb reader (endian, insns);
14360 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14361 THUMB_INSN_SIZE_BYTES);
14362
14363 SELF_CHECK (ret == 0);
14364 SELF_CHECK (arm_record.mem_rec_count == 0);
14365 SELF_CHECK (arm_record.reg_rec_count == 1);
14366 SELF_CHECK (arm_record.arm_regs[0] == 3);
14367
14368 arm_record.this_addr += 2;
14369 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14370 THUMB_INSN_SIZE_BYTES);
14371
14372 SELF_CHECK (ret == 0);
14373 SELF_CHECK (arm_record.mem_rec_count == 0);
14374 SELF_CHECK (arm_record.reg_rec_count == 1);
14375 SELF_CHECK (arm_record.arm_regs[0] == 5);
14376 }
14377
14378 /* 32-bit Thumb-2 instructions. */
14379 {
14380 arm_insn_decode_record arm_record;
14381
14382 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14383 arm_record.gdbarch = gdbarch;
14384
14385 static const uint16_t insns[] = {
14386 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
14387 0xee1d, 0x7f70,
14388 };
14389
14390 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
14391 instruction_reader_thumb reader (endian, insns);
14392 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14393 THUMB2_INSN_SIZE_BYTES);
14394
14395 SELF_CHECK (ret == 0);
14396 SELF_CHECK (arm_record.mem_rec_count == 0);
14397 SELF_CHECK (arm_record.reg_rec_count == 1);
14398 SELF_CHECK (arm_record.arm_regs[0] == 7);
14399 }
14400 }
14401
14402 /* Instruction reader from manually cooked instruction sequences. */
14403
14404 class test_arm_instruction_reader : public arm_instruction_reader
14405 {
14406 public:
14407 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14408 : m_insns (insns)
14409 {}
14410
14411 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14412 {
14413 SELF_CHECK (memaddr % 4 == 0);
14414 SELF_CHECK (memaddr / 4 < m_insns.size ());
14415
14416 return m_insns[memaddr / 4];
14417 }
14418
14419 private:
14420 const gdb::array_view<const uint32_t> m_insns;
14421 };
14422
14423 static void
14424 arm_analyze_prologue_test ()
14425 {
14426 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14427 {
14428 struct gdbarch_info info;
14429 info.byte_order = endianness;
14430 info.byte_order_for_code = endianness;
14431 info.bfd_arch_info = bfd_scan_arch ("arm");
14432
14433 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14434
14435 SELF_CHECK (gdbarch != NULL);
14436
14437 /* The "sub" instruction below encodes its immediate with a rotate count of 0,
14438 which used to result in an undefined 32-bit shift of a 32-bit value,
14439 caught by UBSan. */
14440 const uint32_t insns[] = {
14441 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14442 0xe1a05000, /* mov r5, r0 */
14443 0xe5903020, /* ldr r3, [r0, #32] */
14444 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14445 };
14446
14447 test_arm_instruction_reader mem_reader (insns);
14448 arm_prologue_cache cache;
14449 arm_cache_init (&cache, gdbarch);
14450
14451 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14452 }
14453 }
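/* A minimal illustrative sketch, not code from this file: ARM
   data-processing immediates are an 8-bit value rotated right by twice
   the 4-bit rotate field of the imm12 operand.  For the "sub sp, sp, #68"
   above the rotate field is 0, and a decoder that unconditionally rotates
   by (32 - 2 * rotate) ends up shifting a 32-bit value by 32 bits, which
   is undefined behaviour and is what the test above guards against.
   Special-casing a zero rotate avoids it.  */

static ATTRIBUTE_UNUSED uint32_t
example_expand_arm_immediate (uint32_t imm12)
{
  uint32_t imm8 = imm12 & 0xff;
  unsigned int amount = ((imm12 >> 8) & 0xf) * 2;

  if (amount == 0)
    return imm8;

  /* Rotate right; both shift counts are now within [2, 30].  */
  return (imm8 >> amount) | (imm8 << (32 - amount));
}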
14454
14455 } // namespace selftests
14456 #endif /* GDB_SELF_TEST */
14457
14458 /* Free the register and memory lists allocated while recording an instruction. */
14459
14460 static void
14461 deallocate_reg_mem (arm_insn_decode_record *record)
14462 {
14463 xfree (record->arm_regs);
14464 xfree (record->arm_mems);
14465 }
14466
14467
14468 /* Parse the current instruction and record the values of the registers and
14469 memory locations that it will change in the "record_arch_list".
14470 Return -1 if something is wrong, 0 on success. */
14471
14472 int
14473 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14474 CORE_ADDR insn_addr)
14475 {
14476
14477 uint32_t no_of_rec = 0;
14478 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
14479 ULONGEST t_bit = 0, insn_id = 0;
14480
14481 ULONGEST u_regval = 0;
14482
14483 arm_insn_decode_record arm_record;
14484
14485 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14486 arm_record.regcache = regcache;
14487 arm_record.this_addr = insn_addr;
14488 arm_record.gdbarch = gdbarch;
14489
14490
14491 if (record_debug > 1)
14492 {
14493 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14494 "addr = %s\n",
14495 paddress (gdbarch, arm_record.this_addr));
14496 }
14497
14498 instruction_reader reader;
14499 if (extract_arm_insn (reader, &arm_record, 2))
14500 {
14501 if (record_debug)
14502 {
14503 gdb_printf (gdb_stdlog,
14504 _("Process record: error reading memory at "
14505 "addr %s len = %d.\n"),
14506 paddress (arm_record.gdbarch,
14507 arm_record.this_addr), 2);
14508 }
14509 return -1;
14510 }
14511
14512 /* Check whether the insn is a Thumb or an ARM one. */
14513
14514 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14515 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14516
14517
14518 if (!(u_regval & t_bit))
14519 {
14520 /* We are decoding an ARM insn. */
14521 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14522 }
14523 else
14524 {
14525 insn_id = bits (arm_record.arm_insn, 11, 15);
14526 /* Is it a 32-bit Thumb-2 insn? Its first halfword has the top five bits set to 0x1D, 0x1E or 0x1F. */
14527 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14528 {
14529 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14530 THUMB2_INSN_SIZE_BYTES);
14531 }
14532 else
14533 {
14534 /* We are decoding a 16-bit Thumb insn. */
14535 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14536 THUMB_INSN_SIZE_BYTES);
14537 }
14538 }
14539
14540 if (0 == ret)
14541 {
14542 /* Record registers. */
14543 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14544 if (arm_record.arm_regs)
14545 {
14546 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14547 {
14548 if (record_full_arch_list_add_reg
14549 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14550 ret = -1;
14551 }
14552 }
14553 /* Record memory locations. */
14554 if (arm_record.arm_mems)
14555 {
14556 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14557 {
14558 if (record_full_arch_list_add_mem
14559 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14560 arm_record.arm_mems[no_of_rec].len))
14561 ret = -1;
14562 }
14563 }
14564
14565 if (record_full_arch_list_add_end ())
14566 ret = -1;
14567 }
14568
14569
14570 deallocate_reg_mem (&arm_record);
14571
14572 return ret;
14573 }
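/* A minimal sketch of the Thumb vs. Thumb-2 discrimination performed in
   arm_process_record above, for illustration only (this helper is not
   part of the original file): a halfword whose top five bits are 0x1D,
   0x1E or 0x1F is the first half of a 32-bit Thumb-2 encoding; any other
   halfword is a complete 16-bit Thumb instruction.  */

static ATTRIBUTE_UNUSED bool
example_thumb_halfword_starts_32bit_insn (uint16_t halfword)
{
  unsigned int top5 = (halfword >> 11) & 0x1f;

  return top5 == 0x1d || top5 == 0x1e || top5 == 0x1f;
}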
14574
14575 /* See arm-tdep.h. */
14576
14577 const target_desc *
14578 arm_read_description (arm_fp_type fp_type, bool tls)
14579 {
14580 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14581
14582 if (tdesc == nullptr)
14583 {
14584 tdesc = arm_create_target_description (fp_type, tls);
14585 tdesc_arm_list[fp_type][tls] = tdesc;
14586 }
14587
14588 return tdesc;
14589 }
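/* A usage sketch, assuming the ARM_FP_TYPE_VFPV2 enumerator from
   arch/arm.h (the helper itself is hypothetical): because the lookup
   table above is filled lazily, repeated calls with the same
   (fp_type, tls) pair return the same cached target_desc.
   arm_read_mprofile_description below follows the same pattern for
   M-profile descriptions.  */

static ATTRIBUTE_UNUSED const target_desc *
example_lookup_vfpv2_description ()
{
  return arm_read_description (ARM_FP_TYPE_VFPV2, false);
}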
14590
14591 /* See arm-tdep.h. */
14592
14593 const target_desc *
14594 arm_read_mprofile_description (arm_m_profile_type m_type)
14595 {
14596 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14597
14598 if (tdesc == nullptr)
14599 {
14600 tdesc = arm_create_mprofile_target_description (m_type);
14601 tdesc_arm_mprofile_list[m_type] = tdesc;
14602 }
14603
14604 return tdesc;
14605 }