gdb: make gdbarch_register_reggroup_p take a const reggroup *
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2.h"
42 #include "dwarf2/frame.h"
43 #include "gdbtypes.h"
44 #include "prologue-value.h"
45 #include "remote.h"
46 #include "target-descriptions.h"
47 #include "user-regs.h"
48 #include "observable.h"
49 #include "count-one-bits.h"
50
51 #include "arch/arm.h"
52 #include "arch/arm-get-next-pcs.h"
53 #include "arm-tdep.h"
54 #include "gdb/sim-arm.h"
55
56 #include "elf-bfd.h"
57 #include "coff/internal.h"
58 #include "elf/arm.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "producer.h"
65
66 #if GDB_SELF_TEST
67 #include "gdbsupport/selftest.h"
68 #endif
69
70 static bool arm_debug;
71
72 /* Print an "arm" debug statement. */
73
74 #define arm_debug_printf(fmt, ...) \
75 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
76
77 /* Macros for setting and testing a bit in a minimal symbol that marks
78 it as Thumb function. The MSB of the minimal symbol's "info" field
79 is used for this purpose.
80
81 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
82 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
83
84 #define MSYMBOL_SET_SPECIAL(msym) \
85 MSYMBOL_TARGET_FLAG_1 (msym) = 1
86
87 #define MSYMBOL_IS_SPECIAL(msym) \
88 MSYMBOL_TARGET_FLAG_1 (msym)
89
90 struct arm_mapping_symbol
91 {
92 CORE_ADDR value;
93 char type;
94
95 bool operator< (const arm_mapping_symbol &other) const
96 { return this->value < other.value; }
97 };
98
99 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
100
101 struct arm_per_bfd
102 {
103 explicit arm_per_bfd (size_t num_sections)
104 : section_maps (new arm_mapping_symbol_vec[num_sections]),
105 section_maps_sorted (new bool[num_sections] ())
106 {}
107
108 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
109
110 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111
112 The format is an array of vectors of arm_mapping_symbols; there is one
113 vector for each section of the objfile (the array is indexed by BFD
114 section index).
115
116 For each section, the vector of arm_mapping_symbol is sorted by
117 symbol value (address). */
118 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
119
120 /* For each corresponding element of section_maps above, is this vector
121 sorted. */
122 std::unique_ptr<bool[]> section_maps_sorted;
123 };
124
125 /* Per-bfd data used for mapping symbols. */
126 static bfd_key<arm_per_bfd> arm_bfd_data_key;
127
128 /* The list of available "set arm ..." and "show arm ..." commands. */
129 static struct cmd_list_element *setarmcmdlist = NULL;
130 static struct cmd_list_element *showarmcmdlist = NULL;
131
132 /* The type of floating-point to use. Keep this in sync with enum
133 arm_float_model, and the help string in _initialize_arm_tdep. */
134 static const char *const fp_model_strings[] =
135 {
136 "auto",
137 "softfpa",
138 "fpa",
139 "softvfp",
140 "vfp",
141 NULL
142 };
143
144 /* A variable that can be configured by the user. */
145 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
146 static const char *current_fp_model = "auto";
147
148 /* The ABI to use. Keep this in sync with arm_abi_kind. */
149 static const char *const arm_abi_strings[] =
150 {
151 "auto",
152 "APCS",
153 "AAPCS",
154 NULL
155 };
156
157 /* A variable that can be configured by the user. */
158 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
159 static const char *arm_abi_string = "auto";
160
161 /* The execution mode to assume. */
162 static const char *const arm_mode_strings[] =
163 {
164 "auto",
165 "arm",
166 "thumb",
167 NULL
168 };
169
170 static const char *arm_fallback_mode_string = "auto";
171 static const char *arm_force_mode_string = "auto";
172
173 /* The standard register names, and all the valid aliases for them. Note
174 that `fp', `sp' and `pc' are not added in this alias list, because they
175 have been added as builtin user registers in
176 std-regs.c:_initialize_frame_reg. */
177 static const struct
178 {
179 const char *name;
180 int regnum;
181 } arm_register_aliases[] = {
182 /* Basic register numbers. */
183 { "r0", 0 },
184 { "r1", 1 },
185 { "r2", 2 },
186 { "r3", 3 },
187 { "r4", 4 },
188 { "r5", 5 },
189 { "r6", 6 },
190 { "r7", 7 },
191 { "r8", 8 },
192 { "r9", 9 },
193 { "r10", 10 },
194 { "r11", 11 },
195 { "r12", 12 },
196 { "r13", 13 },
197 { "r14", 14 },
198 { "r15", 15 },
199 /* Synonyms (argument and variable registers). */
200 { "a1", 0 },
201 { "a2", 1 },
202 { "a3", 2 },
203 { "a4", 3 },
204 { "v1", 4 },
205 { "v2", 5 },
206 { "v3", 6 },
207 { "v4", 7 },
208 { "v5", 8 },
209 { "v6", 9 },
210 { "v7", 10 },
211 { "v8", 11 },
212 /* Other platform-specific names for r9. */
213 { "sb", 9 },
214 { "tr", 9 },
215 /* Special names. */
216 { "ip", 12 },
217 { "lr", 14 },
218 /* Names used by GCC (not listed in the ARM EABI). */
219 { "sl", 10 },
220 /* A special name from the older ATPCS. */
221 { "wr", 7 },
222 };
223
224 static const char *const arm_register_names[] =
225 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
226 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
227 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
228 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
229 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
230 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
231 "fps", "cpsr" }; /* 24 25 */
232
233 /* Holds the current set of options to be passed to the disassembler. */
234 static char *arm_disassembler_options;
235
236 /* Valid register name styles. */
237 static const char **valid_disassembly_styles;
238
239 /* Disassembly style to use. Default to "std" register names. */
240 static const char *disassembly_style;
241
242 /* All possible arm target descriptors. */
243 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
244 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
245
246 /* This is used to keep the bfd arch_info in sync with the disassembly
247 style. */
248 static void set_disassembly_style_sfunc (const char *, int,
249 struct cmd_list_element *);
250 static void show_disassembly_style_sfunc (struct ui_file *, int,
251 struct cmd_list_element *,
252 const char *);
253
254 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
255 readable_regcache *regcache,
256 int regnum, gdb_byte *buf);
257 static void arm_neon_quad_write (struct gdbarch *gdbarch,
258 struct regcache *regcache,
259 int regnum, const gdb_byte *buf);
260
261 static CORE_ADDR
262 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
263
264
265 /* get_next_pcs operations. */
266 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
267 arm_get_next_pcs_read_memory_unsigned_integer,
268 arm_get_next_pcs_syscall_next_pc,
269 arm_get_next_pcs_addr_bits_remove,
270 arm_get_next_pcs_is_thumb,
271 NULL,
272 };
273
274 struct arm_prologue_cache
275 {
276 /* The stack pointer at the time this frame was created; i.e. the
277 caller's stack pointer when this function was called. It is used
278 to identify this frame. */
279 CORE_ADDR prev_sp;
280
281 /* The frame base for this frame is just prev_sp - frame size.
282 FRAMESIZE is the distance from the frame pointer to the
283 initial stack pointer. */
284
285 int framesize;
286
287 /* The register used to hold the frame pointer for this frame. */
288 int framereg;
289
290 /* True if the return address is signed, false otherwise. */
291 gdb::optional<bool> ra_signed_state;
292
293 /* Saved register offsets. */
294 trad_frame_saved_reg *saved_regs;
295 };
296
297 namespace {
298
299 /* Abstract class to read ARM instructions from memory. */
300
301 class arm_instruction_reader
302 {
303 public:
304 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
305 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
306 };
307
308 /* Read instructions from target memory. */
309
310 class target_arm_instruction_reader : public arm_instruction_reader
311 {
312 public:
313 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
314 {
315 return read_code_unsigned_integer (memaddr, 4, byte_order);
316 }
317 };
318
319 } /* namespace */
320
321 static CORE_ADDR arm_analyze_prologue
322 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
323 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
324
325 /* Architecture version for displaced stepping. This affects the behaviour of
326 certain instructions, and really should not be hard-wired. */
327
328 #define DISPLACED_STEPPING_ARCH_VERSION 5
329
330 /* See arm-tdep.h. */
331
332 bool arm_apcs_32 = true;
333
334 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
335
336 int
337 arm_psr_thumb_bit (struct gdbarch *gdbarch)
338 {
339 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
340
341 if (tdep->is_m)
342 return XPSR_T;
343 else
344 return CPSR_T;
345 }
346
347 /* Determine if the processor is currently executing in Thumb mode. */
348
349 int
350 arm_is_thumb (struct regcache *regcache)
351 {
352 ULONGEST cpsr;
353 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
354
355 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
356
357 return (cpsr & t_bit) != 0;
358 }
359
360 /* Determine if FRAME is executing in Thumb mode. */
361
362 int
363 arm_frame_is_thumb (struct frame_info *frame)
364 {
365 CORE_ADDR cpsr;
366 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
367
368 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
369 directly (from a signal frame or dummy frame) or by interpreting
370 the saved LR (from a prologue or DWARF frame). So consult it and
371 trust the unwinders. */
372 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
373
374 return (cpsr & t_bit) != 0;
375 }
376
377 /* Search for the mapping symbol covering MEMADDR. If one is found,
378 return its type. Otherwise, return 0. If START is non-NULL,
379 set *START to the location of the mapping symbol. */
380
381 static char
382 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
383 {
384 struct obj_section *sec;
385
386 /* If there are mapping symbols, consult them. */
387 sec = find_pc_section (memaddr);
388 if (sec != NULL)
389 {
390 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
391 if (data != NULL)
392 {
393 unsigned int section_idx = sec->the_bfd_section->index;
394 arm_mapping_symbol_vec &map
395 = data->section_maps[section_idx];
396
397 /* Sort the vector on first use. */
398 if (!data->section_maps_sorted[section_idx])
399 {
400 std::sort (map.begin (), map.end ());
401 data->section_maps_sorted[section_idx] = true;
402 }
403
404 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
405 arm_mapping_symbol_vec::const_iterator it
406 = std::lower_bound (map.begin (), map.end (), map_key);
407
408 /* std::lower_bound finds the earliest ordered insertion
409 point. If the symbol at this position starts at this exact
410 address, we use that; otherwise, the preceding
411 mapping symbol covers this address. */
412 if (it < map.end ())
413 {
414 if (it->value == map_key.value)
415 {
416 if (start)
417 *start = it->value + sec->addr ();
418 return it->type;
419 }
420 }
421
422 if (it > map.begin ())
423 {
424 arm_mapping_symbol_vec::const_iterator prev_it
425 = it - 1;
426
427 if (start)
428 *start = prev_it->value + sec->addr ();
429 return prev_it->type;
430 }
431 }
432 }
433
434 return 0;
435 }
436
437 /* Determine if the program counter specified in MEMADDR is in a Thumb
438 function. This function should be called for addresses unrelated to
439 any executing frame; otherwise, prefer arm_frame_is_thumb. */
440
441 int
442 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
443 {
444 struct bound_minimal_symbol sym;
445 char type;
446 arm_displaced_step_copy_insn_closure *dsc = nullptr;
447 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
448
449 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
450 dsc = ((arm_displaced_step_copy_insn_closure * )
451 gdbarch_displaced_step_copy_insn_closure_by_addr
452 (gdbarch, current_inferior (), memaddr));
453
454 /* When checking the mode of a displaced instruction in the copy area, the
455 mode should be determined from the instruction at the original address. */
456 if (dsc)
457 {
458 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
459 (unsigned long) dsc->insn_addr,
460 (unsigned long) memaddr);
461 memaddr = dsc->insn_addr;
462 }
463
464 /* If bit 0 of the address is set, assume this is a Thumb address. */
465 if (IS_THUMB_ADDR (memaddr))
466 return 1;
467
468 /* If the user wants to override the symbol table, let them. */
469 if (strcmp (arm_force_mode_string, "arm") == 0)
470 return 0;
471 if (strcmp (arm_force_mode_string, "thumb") == 0)
472 return 1;
473
474 /* ARM v6-M and v7-M are always in Thumb mode. */
475 if (tdep->is_m)
476 return 1;
477
478 /* If there are mapping symbols, consult them. */
479 type = arm_find_mapping_symbol (memaddr, NULL);
480 if (type)
481 return type == 't';
482
483 /* Thumb functions have a "special" bit set in minimal symbols. */
484 sym = lookup_minimal_symbol_by_pc (memaddr);
485 if (sym.minsym)
486 return (MSYMBOL_IS_SPECIAL (sym.minsym));
487
488 /* If the user wants to override the fallback mode, let them. */
489 if (strcmp (arm_fallback_mode_string, "arm") == 0)
490 return 0;
491 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
492 return 1;
493
494 /* If we couldn't find any symbol, but we're talking to a running
495 target, then trust the current value of $cpsr. This lets
496 "display/i $pc" always show the correct mode (though if there is
497 a symbol table we will not reach here, so it still may not be
498 displayed in the mode it will be executed). */
499 if (target_has_registers ())
500 return arm_frame_is_thumb (get_current_frame ());
501
502 /* Otherwise we're out of luck; we assume ARM. */
503 return 0;
504 }
505
506 /* Determine if the address specified equals any of these magic return
507 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
508 architectures.
509
510 From ARMv6-M Reference Manual B1.5.8
511 Table B1-5 Exception return behavior
512
513 EXC_RETURN Return To Return Stack
514 0xFFFFFFF1 Handler mode Main
515 0xFFFFFFF9 Thread mode Main
516 0xFFFFFFFD Thread mode Process
517
518 From ARMv7-M Reference Manual B1.5.8
519 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
520
521 EXC_RETURN Return To Return Stack
522 0xFFFFFFF1 Handler mode Main
523 0xFFFFFFF9 Thread mode Main
524 0xFFFFFFFD Thread mode Process
525
526 Table B1-9 EXC_RETURN definition of exception return behavior, with
527 FP
528
529 EXC_RETURN Return To Return Stack Frame Type
530 0xFFFFFFE1 Handler mode Main Extended
531 0xFFFFFFE9 Thread mode Main Extended
532 0xFFFFFFED Thread mode Process Extended
533 0xFFFFFFF1 Handler mode Main Basic
534 0xFFFFFFF9 Thread mode Main Basic
535 0xFFFFFFFD Thread mode Process Basic
536
537 For more details see "B1.5.8 Exception return behavior"
538 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
539
540 The ARMv8-M Architecture Technical Reference also adds the following,
541 for implementations without the Security Extension:
542
543 EXC_RETURN Condition
544 0xFFFFFFB0 Return to Handler mode.
545 0xFFFFFFB8 Return to Thread mode using the main stack.
546 0xFFFFFFBC Return to Thread mode using the process stack. */
547
548 static int
549 arm_m_addr_is_magic (CORE_ADDR addr)
550 {
551 switch (addr)
552 {
553 /* Values from ARMv8-M Architecture Technical Reference. */
554 case 0xffffffb0:
555 case 0xffffffb8:
556 case 0xffffffbc:
557 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
558 the exception return behavior. */
559 case 0xffffffe1:
560 case 0xffffffe9:
561 case 0xffffffed:
562 case 0xfffffff1:
563 case 0xfffffff9:
564 case 0xfffffffd:
565 /* Address is magic. */
566 return 1;
567
568 default:
569 /* Address is not magic. */
570 return 0;
571 }
572 }
573
574 /* Remove useless bits from addresses in a running program. */
575 static CORE_ADDR
576 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
577 {
578 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
579
580 /* On M-profile devices, do not strip the low bit from EXC_RETURN
581 (the magic exception return address). */
582 if (tdep->is_m && arm_m_addr_is_magic (val))
583 return val;
584
585 if (arm_apcs_32)
586 return UNMAKE_THUMB_ADDR (val);
587 else
588 return (val & 0x03fffffc);
589 }
590
591 /* Return 1 if PC is the start of a compiler helper function which
592 can be safely ignored during prologue skipping. IS_THUMB is true
593 if the function is known to be a Thumb function due to the way it
594 is being called. */
595 static int
596 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
597 {
598 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
599 struct bound_minimal_symbol msym;
600
601 msym = lookup_minimal_symbol_by_pc (pc);
602 if (msym.minsym != NULL
603 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
604 && msym.minsym->linkage_name () != NULL)
605 {
606 const char *name = msym.minsym->linkage_name ();
607
608 /* The GNU linker's Thumb call stub to foo is named
609 __foo_from_thumb. */
610 if (strstr (name, "_from_thumb") != NULL)
611 name += 2;
612
613 /* On soft-float targets, __truncdfsf2 is called to convert promoted
614 arguments to their argument types in non-prototyped
615 functions. */
616 if (startswith (name, "__truncdfsf2"))
617 return 1;
618 if (startswith (name, "__aeabi_d2f"))
619 return 1;
620
621 /* Internal functions related to thread-local storage. */
622 if (startswith (name, "__tls_get_addr"))
623 return 1;
624 if (startswith (name, "__aeabi_read_tp"))
625 return 1;
626 }
627 else
628 {
629 /* If we run against a stripped glibc, we may be unable to identify
630 special functions by name. Check for one important case,
631 __aeabi_read_tp, by comparing the *code* against the default
632 implementation (this is hand-written ARM assembler in glibc). */
633
634 if (!is_thumb
635 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
636 == 0xe3e00a0f /* mov r0, #0xffff0fff */
637 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
638 == 0xe240f01f) /* sub pc, r0, #31 */
639 return 1;
640 }
641
642 return 0;
643 }
644
645 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
646 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
647 the instruction. */
648 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
649 ((bits ((insn1), 0, 3) << 12) \
650 | (bits ((insn1), 10, 10) << 11) \
651 | (bits ((insn2), 12, 14) << 8) \
652 | bits ((insn2), 0, 7))
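/* As a worked illustration (values chosen here, not taken from the original
   source): "movw r0, #0x1234" encoded as insn1 = 0xf241, insn2 = 0x2034 makes
   the macro above combine 0x1 << 12, 0 << 11, 0x2 << 8 and 0x34 into 0x1234. */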
653
654 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
655 the 32-bit instruction. */
656 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
657 ((bits ((insn), 16, 19) << 12) \
658 | bits ((insn), 0, 11))
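/* As a worked illustration (values chosen here, not taken from the original
   source): "movw r1, #0x1234" encoded as insn = 0xe3011234 makes the macro
   above combine 0x1 << 12 and 0x234 into 0x1234. */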
659
660 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
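/* As a worked illustration (values chosen here, not taken from the original
   comment): imm = 0x0ab has count 1 and yields 0x000000ab; imm = 0x455 has
   count 8, so the byte 0xd5 is rotated right by 8 bits, giving 0xd5000000. */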
661
662 static unsigned int
663 thumb_expand_immediate (unsigned int imm)
664 {
665 unsigned int count = imm >> 7;
666
667 if (count < 8)
668 switch (count / 2)
669 {
670 case 0:
671 return imm & 0xff;
672 case 1:
673 return (imm & 0xff) | ((imm & 0xff) << 16);
674 case 2:
675 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
676 case 3:
677 return (imm & 0xff) | ((imm & 0xff) << 8)
678 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
679 }
680
681 return (0x80 | (imm & 0x7f)) << (32 - count);
682 }
683
684 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
685 the epilogue, 0 otherwise. */
686
687 static int
688 thumb_instruction_restores_sp (unsigned short insn)
689 {
690 return (insn == 0x46bd /* mov sp, r7 */
691 || (insn & 0xff80) == 0xb000 /* add sp, imm */
692 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
693 }
694
695 /* Analyze a Thumb prologue, looking for a recognizable stack frame
696 and frame pointer. Scan until we encounter a store that could
697 clobber the stack frame unexpectedly, or an unknown instruction.
698 Return the last address which is definitely safe to skip for an
699 initial breakpoint. */
700
701 static CORE_ADDR
702 thumb_analyze_prologue (struct gdbarch *gdbarch,
703 CORE_ADDR start, CORE_ADDR limit,
704 struct arm_prologue_cache *cache)
705 {
706 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
707 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
708 int i;
709 pv_t regs[16];
710 CORE_ADDR offset;
711 CORE_ADDR unrecognized_pc = 0;
712
713 for (i = 0; i < 16; i++)
714 regs[i] = pv_register (i, 0);
715 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
716
717 while (start < limit)
718 {
719 unsigned short insn;
720 gdb::optional<bool> ra_signed_state;
721
722 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
723
724 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
725 {
726 int regno;
727 int mask;
728
729 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
730 break;
731
732 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
733 whether to save LR (R14). */
734 mask = (insn & 0xff) | ((insn & 0x100) << 6);
735
736 /* Calculate offsets of saved R0-R7 and LR. */
737 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
738 if (mask & (1 << regno))
739 {
740 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
741 -4);
742 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
743 }
744 }
745 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
746 {
747 offset = (insn & 0x7f) << 2; /* get scaled offset */
748 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
749 -offset);
750 }
751 else if (thumb_instruction_restores_sp (insn))
752 {
753 /* Don't scan past the epilogue. */
754 break;
755 }
756 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
757 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
758 (insn & 0xff) << 2);
759 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
760 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
761 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
762 bits (insn, 6, 8));
763 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
764 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
765 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
766 bits (insn, 0, 7));
767 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
768 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
769 && pv_is_constant (regs[bits (insn, 3, 5)]))
770 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
771 regs[bits (insn, 6, 8)]);
772 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
773 && pv_is_constant (regs[bits (insn, 3, 6)]))
774 {
775 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
776 int rm = bits (insn, 3, 6);
777 regs[rd] = pv_add (regs[rd], regs[rm]);
778 }
779 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
780 {
781 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
782 int src_reg = (insn & 0x78) >> 3;
783 regs[dst_reg] = regs[src_reg];
784 }
785 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
786 {
787 /* Handle stores to the stack. Normally pushes are used,
788 but with GCC -mtpcs-frame, there may be other stores
789 in the prologue to create the frame. */
790 int regno = (insn >> 8) & 0x7;
791 pv_t addr;
792
793 offset = (insn & 0xff) << 2;
794 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
795
796 if (stack.store_would_trash (addr))
797 break;
798
799 stack.store (addr, 4, regs[regno]);
800 }
801 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
802 {
803 int rd = bits (insn, 0, 2);
804 int rn = bits (insn, 3, 5);
805 pv_t addr;
806
807 offset = bits (insn, 6, 10) << 2;
808 addr = pv_add_constant (regs[rn], offset);
809
810 if (stack.store_would_trash (addr))
811 break;
812
813 stack.store (addr, 4, regs[rd]);
814 }
815 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
816 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
817 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
818 /* Ignore stores of argument registers to the stack. */
819 ;
820 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
821 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
822 /* Ignore block loads from the stack, potentially copying
823 parameters from memory. */
824 ;
825 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
826 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
827 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
828 /* Similarly ignore single loads from the stack. */
829 ;
830 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
831 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
832 /* Skip register copies, i.e. saves to another register
833 instead of the stack. */
834 ;
835 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
836 /* Recognize constant loads; even with small stacks these are necessary
837 on Thumb. */
838 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
839 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
840 {
841 /* Constant pool loads, for the same reason. */
842 unsigned int constant;
843 CORE_ADDR loc;
844
845 loc = start + 4 + bits (insn, 0, 7) * 4;
846 constant = read_memory_unsigned_integer (loc, 4, byte_order);
847 regs[bits (insn, 8, 10)] = pv_constant (constant);
848 }
849 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
850 {
851 unsigned short inst2;
852
853 inst2 = read_code_unsigned_integer (start + 2, 2,
854 byte_order_for_code);
855 uint32_t whole_insn = (insn << 16) | inst2;
856
857 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
858 {
859 /* BL, BLX. Allow some special function calls when
860 skipping the prologue; GCC generates these before
861 storing arguments to the stack. */
862 CORE_ADDR nextpc;
863 int j1, j2, imm1, imm2;
864
865 imm1 = sbits (insn, 0, 10);
866 imm2 = bits (inst2, 0, 10);
867 j1 = bit (inst2, 13);
868 j2 = bit (inst2, 11);
869
870 offset = ((imm1 << 12) + (imm2 << 1));
871 offset ^= ((!j2) << 22) | ((!j1) << 23);
872
873 nextpc = start + 4 + offset;
874 /* For BLX make sure to clear the low bits. */
875 if (bit (inst2, 12) == 0)
876 nextpc = nextpc & 0xfffffffc;
877
878 if (!skip_prologue_function (gdbarch, nextpc,
879 bit (inst2, 12) != 0))
880 break;
881 }
882
883 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
884 { registers } */
885 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
886 {
887 pv_t addr = regs[bits (insn, 0, 3)];
888 int regno;
889
890 if (stack.store_would_trash (addr))
891 break;
892
893 /* Calculate offsets of saved registers. */
894 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
895 if (inst2 & (1 << regno))
896 {
897 addr = pv_add_constant (addr, -4);
898 stack.store (addr, 4, regs[regno]);
899 }
900
901 if (insn & 0x0020)
902 regs[bits (insn, 0, 3)] = addr;
903 }
904
905 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
906 [Rn, #+/-imm]{!} */
907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
908 {
909 int regno1 = bits (inst2, 12, 15);
910 int regno2 = bits (inst2, 8, 11);
911 pv_t addr = regs[bits (insn, 0, 3)];
912
913 offset = inst2 & 0xff;
914 if (insn & 0x0080)
915 addr = pv_add_constant (addr, offset);
916 else
917 addr = pv_add_constant (addr, -offset);
918
919 if (stack.store_would_trash (addr))
920 break;
921
922 stack.store (addr, 4, regs[regno1]);
923 stack.store (pv_add_constant (addr, 4),
924 4, regs[regno2]);
925
926 if (insn & 0x0020)
927 regs[bits (insn, 0, 3)] = addr;
928 }
929
930 else if ((insn & 0xfff0) == 0xf840 /* str Rt,[Rn,+/-#imm]{!} */
931 && (inst2 & 0x0c00) == 0x0c00
932 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
933 {
934 int regno = bits (inst2, 12, 15);
935 pv_t addr = regs[bits (insn, 0, 3)];
936
937 offset = inst2 & 0xff;
938 if (inst2 & 0x0200)
939 addr = pv_add_constant (addr, offset);
940 else
941 addr = pv_add_constant (addr, -offset);
942
943 if (stack.store_would_trash (addr))
944 break;
945
946 stack.store (addr, 4, regs[regno]);
947
948 if (inst2 & 0x0100)
949 regs[bits (insn, 0, 3)] = addr;
950 }
951
952 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
953 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
954 {
955 int regno = bits (inst2, 12, 15);
956 pv_t addr;
957
958 offset = inst2 & 0xfff;
959 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
960
961 if (stack.store_would_trash (addr))
962 break;
963
964 stack.store (addr, 4, regs[regno]);
965 }
966
967 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
970 ;
971
972 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
973 && (inst2 & 0x0d00) == 0x0c00
974 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
975 /* Ignore stores of argument registers to the stack. */
976 ;
977
978 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
979 { registers } */
980 && (inst2 & 0x8000) == 0x0000
981 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
982 /* Ignore block loads from the stack, potentially copying
983 parameters from memory. */
984 ;
985
986 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
987 [Rn, #+/-imm] */
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore dual loads from the stack. */
990 ;
991
992 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
993 && (inst2 & 0x0d00) == 0x0c00
994 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
995 /* Similarly ignore single loads from the stack. */
996 ;
997
998 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
999 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1000 /* Similarly ignore single loads from the stack. */
1001 ;
1002
1003 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1004 && (inst2 & 0x8000) == 0x0000)
1005 {
1006 unsigned int imm = ((bits (insn, 10, 10) << 11)
1007 | (bits (inst2, 12, 14) << 8)
1008 | bits (inst2, 0, 7));
1009
1010 regs[bits (inst2, 8, 11)]
1011 = pv_add_constant (regs[bits (insn, 0, 3)],
1012 thumb_expand_immediate (imm));
1013 }
1014
1015 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1016 && (inst2 & 0x8000) == 0x0000)
1017 {
1018 unsigned int imm = ((bits (insn, 10, 10) << 11)
1019 | (bits (inst2, 12, 14) << 8)
1020 | bits (inst2, 0, 7));
1021
1022 regs[bits (inst2, 8, 11)]
1023 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1024 }
1025
1026 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1027 && (inst2 & 0x8000) == 0x0000)
1028 {
1029 unsigned int imm = ((bits (insn, 10, 10) << 11)
1030 | (bits (inst2, 12, 14) << 8)
1031 | bits (inst2, 0, 7));
1032
1033 regs[bits (inst2, 8, 11)]
1034 = pv_add_constant (regs[bits (insn, 0, 3)],
1035 - (CORE_ADDR) thumb_expand_immediate (imm));
1036 }
1037
1038 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1039 && (inst2 & 0x8000) == 0x0000)
1040 {
1041 unsigned int imm = ((bits (insn, 10, 10) << 11)
1042 | (bits (inst2, 12, 14) << 8)
1043 | bits (inst2, 0, 7));
1044
1045 regs[bits (inst2, 8, 11)]
1046 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1047 }
1048
1049 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1050 {
1051 unsigned int imm = ((bits (insn, 10, 10) << 11)
1052 | (bits (inst2, 12, 14) << 8)
1053 | bits (inst2, 0, 7));
1054
1055 regs[bits (inst2, 8, 11)]
1056 = pv_constant (thumb_expand_immediate (imm));
1057 }
1058
1059 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1060 {
1061 unsigned int imm
1062 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1063
1064 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1065 }
1066
1067 else if (insn == 0xea5f /* mov.w Rd,Rm */
1068 && (inst2 & 0xf0f0) == 0)
1069 {
1070 int dst_reg = (inst2 & 0x0f00) >> 8;
1071 int src_reg = inst2 & 0xf;
1072 regs[dst_reg] = regs[src_reg];
1073 }
1074
1075 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1076 {
1077 /* Constant pool loads. */
1078 unsigned int constant;
1079 CORE_ADDR loc;
1080
1081 offset = bits (inst2, 0, 11);
1082 if (insn & 0x0080)
1083 loc = start + 4 + offset;
1084 else
1085 loc = start + 4 - offset;
1086
1087 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1088 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1089 }
1090
1091 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1092 {
1093 /* Constant pool loads. */
1094 unsigned int constant;
1095 CORE_ADDR loc;
1096
1097 offset = bits (inst2, 0, 7) << 2;
1098 if (insn & 0x0080)
1099 loc = start + 4 + offset;
1100 else
1101 loc = start + 4 - offset;
1102
1103 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1104 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1105
1106 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1107 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1108 }
1109 /* Start of ARMv8.1-m PACBTI extension instructions. */
1110 else if (IS_PAC (whole_insn))
1111 {
1112 /* LR and SP are input registers. PAC is in R12. LR is
1113 signed from this point onwards. NOP space. */
1114 ra_signed_state = true;
1115 }
1116 else if (IS_PACBTI (whole_insn))
1117 {
1118 /* LR and SP are input registers. PAC is in R12 and PC is a
1119 valid BTI landing pad. LR is signed from this point onwards.
1120 NOP space. */
1121 ra_signed_state = true;
1122 }
1123 else if (IS_BTI (whole_insn))
1124 {
1125 /* Valid BTI landing pad. NOP space. */
1126 }
1127 else if (IS_PACG (whole_insn))
1128 {
1129 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1130 this point onwards. */
1131 ra_signed_state = true;
1132 }
1133 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1134 {
1135 /* These instructions appear close to the epilogue, when signed
1136 pointers are getting authenticated. */
1137 ra_signed_state = false;
1138 }
1139 /* End of ARMv8.1-m PACBTI extension instructions */
1140 else if (thumb2_instruction_changes_pc (insn, inst2))
1141 {
1142 /* Don't scan past anything that might change control flow. */
1143 break;
1144 }
1145 else
1146 {
1147 /* The optimizer might shove anything into the prologue,
1148 so we just skip what we don't recognize. */
1149 unrecognized_pc = start;
1150 }
1151
1152 arm_gdbarch_tdep *tdep
1153 = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1154
1155 /* Make sure we are dealing with a target that supports ARMv8.1-m
1156 PACBTI. */
1157 if (cache != nullptr && tdep->have_pacbti
1158 && ra_signed_state.has_value ())
1159 {
1160 arm_debug_printf ("Found pacbti instruction at %s",
1161 paddress (gdbarch, start));
1162 arm_debug_printf ("RA is %s",
1163 *ra_signed_state ? "signed" : "not signed");
1164 cache->ra_signed_state = ra_signed_state;
1165 }
1166
1167 start += 2;
1168 }
1169 else if (thumb_instruction_changes_pc (insn))
1170 {
1171 /* Don't scan past anything that might change control flow. */
1172 break;
1173 }
1174 else
1175 {
1176 /* The optimizer might shove anything into the prologue,
1177 so we just skip what we don't recognize. */
1178 unrecognized_pc = start;
1179 }
1180
1181 start += 2;
1182 }
1183
1184 arm_debug_printf ("Prologue scan stopped at %s",
1185 paddress (gdbarch, start));
1186
1187 if (unrecognized_pc == 0)
1188 unrecognized_pc = start;
1189
1190 if (cache == NULL)
1191 return unrecognized_pc;
1192
1193 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1194 {
1195 /* Frame pointer is fp. Frame size is constant. */
1196 cache->framereg = ARM_FP_REGNUM;
1197 cache->framesize = -regs[ARM_FP_REGNUM].k;
1198 }
1199 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1200 {
1201 /* Frame pointer is r7. Frame size is constant. */
1202 cache->framereg = THUMB_FP_REGNUM;
1203 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1204 }
1205 else
1206 {
1207 /* Try the stack pointer... this is a bit desperate. */
1208 cache->framereg = ARM_SP_REGNUM;
1209 cache->framesize = -regs[ARM_SP_REGNUM].k;
1210 }
1211
1212 for (i = 0; i < 16; i++)
1213 if (stack.find_reg (gdbarch, i, &offset))
1214 cache->saved_regs[i].set_addr (offset);
1215
1216 return unrecognized_pc;
1217 }
1218
1219
1220 /* Try to analyze the instructions starting at PC, which load the symbol
1221 __stack_chk_guard. Return the address of the instruction after the symbol
1222 has been loaded, set the destination register number in *DESTREG, and set
1223 the size of the loading instructions in *OFFSET. Return 0 if the
1224 instructions are not recognized. */
1225
1226 static CORE_ADDR
1227 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1228 unsigned int *destreg, int *offset)
1229 {
1230 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1231 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1232 unsigned int low, high, address;
1233
1234 address = 0;
1235 if (is_thumb)
1236 {
1237 unsigned short insn1
1238 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1239
1240 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, [pc, #immed] */
1241 {
1242 *destreg = bits (insn1, 8, 10);
1243 *offset = 2;
1244 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1245 address = read_memory_unsigned_integer (address, 4,
1246 byte_order_for_code);
1247 }
1248 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1249 {
1250 unsigned short insn2
1251 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1252
1253 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1254
1255 insn1
1256 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1257 insn2
1258 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1259
1260 /* movt Rd, #const */
1261 if ((insn1 & 0xfbc0) == 0xf2c0)
1262 {
1263 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1264 *destreg = bits (insn2, 8, 11);
1265 *offset = 8;
1266 address = (high << 16 | low);
1267 }
1268 }
1269 }
1270 else
1271 {
1272 unsigned int insn
1273 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1274
1275 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1276 {
1277 address = bits (insn, 0, 11) + pc + 8;
1278 address = read_memory_unsigned_integer (address, 4,
1279 byte_order_for_code);
1280
1281 *destreg = bits (insn, 12, 15);
1282 *offset = 4;
1283 }
1284 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1285 {
1286 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1287
1288 insn
1289 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1290
1291 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1292 {
1293 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1294 *destreg = bits (insn, 12, 15);
1295 *offset = 8;
1296 address = (high << 16 | low);
1297 }
1298 }
1299 }
1300
1301 return address;
1302 }
1303
1304 /* Try to skip a sequence of instructions used for the stack protector. If PC
1305 points to the first instruction of this sequence, return the address of the
1306 first instruction after this sequence; otherwise, return the original PC.
1307
1308 On ARM, this sequence of instructions is composed of three main steps:
1309 Step 1: load symbol __stack_chk_guard,
1310 Step 2: load from address of __stack_chk_guard,
1311 Step 3: store it to somewhere else.
1312
1313 The instructions in steps 2 and 3 are usually the same across ARM
1314 architectures: step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1315 step 3 is the single instruction 'str Rx, [r7, #immd]'. However, the
1316 instructions in step 1 vary between ARM architectures. On ARMv7, they
1317 are:
1318
1319 movw Rn, #:lower16:__stack_chk_guard
1320 movt Rn, #:upper16:__stack_chk_guard
1321
1322 On ARMv5t, it is:
1323
1324 ldr Rn, .Label
1325 ....
1326 .Label:
1327 .word __stack_chk_guard
1328
1329 Since ldr/str are very common instructions, we can't use them alone as the
1330 'fingerprint' or 'signature' of the stack protector sequence. Here we use
1331 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1332 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
1333
1334 static CORE_ADDR
1335 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1336 {
1337 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1338 unsigned int basereg;
1339 struct bound_minimal_symbol stack_chk_guard;
1340 int offset;
1341 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1342 CORE_ADDR addr;
1343
1344 /* Try to parse the instructions in Step 1. */
1345 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1346 &basereg, &offset);
1347 if (!addr)
1348 return pc;
1349
1350 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1351 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1352 Otherwise, this sequence cannot be for the stack protector. */
1353 if (stack_chk_guard.minsym == NULL
1354 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1355 return pc;
1356
1357 if (is_thumb)
1358 {
1359 unsigned int destreg;
1360 unsigned short insn
1361 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1362
1363 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1364 if ((insn & 0xf800) != 0x6800)
1365 return pc;
1366 if (bits (insn, 3, 5) != basereg)
1367 return pc;
1368 destreg = bits (insn, 0, 2);
1369
1370 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1371 byte_order_for_code);
1372 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1373 if ((insn & 0xf800) != 0x6000)
1374 return pc;
1375 if (destreg != bits (insn, 0, 2))
1376 return pc;
1377 }
1378 else
1379 {
1380 unsigned int destreg;
1381 unsigned int insn
1382 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1383
1384 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1385 if ((insn & 0x0e500000) != 0x04100000)
1386 return pc;
1387 if (bits (insn, 16, 19) != basereg)
1388 return pc;
1389 destreg = bits (insn, 12, 15);
1390 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1391 insn = read_code_unsigned_integer (pc + offset + 4,
1392 4, byte_order_for_code);
1393 if ((insn & 0x0e500000) != 0x04000000)
1394 return pc;
1395 if (bits (insn, 12, 15) != destreg)
1396 return pc;
1397 }
1398 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2 and
1399 8 bytes on ARM. */
1400 if (is_thumb)
1401 return pc + offset + 4;
1402 else
1403 return pc + offset + 8;
1404 }
1405
1406 /* Advance the PC across any function entry prologue instructions to
1407 reach some "real" code.
1408
1409 The APCS (ARM Procedure Call Standard) defines the following
1410 prologue:
1411
1412 mov ip, sp
1413 [stmfd sp!, {a1,a2,a3,a4}]
1414 stmfd sp!, {...,fp,ip,lr,pc}
1415 [stfe f7, [sp, #-12]!]
1416 [stfe f6, [sp, #-12]!]
1417 [stfe f5, [sp, #-12]!]
1418 [stfe f4, [sp, #-12]!]
1419 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1420
1421 static CORE_ADDR
1422 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1423 {
1424 CORE_ADDR func_addr, limit_pc;
1425
1426 /* See if we can determine the end of the prologue via the symbol table.
1427 If so, then return either PC, or the PC after the prologue, whichever
1428 is greater. */
1429 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1430 {
1431 CORE_ADDR post_prologue_pc
1432 = skip_prologue_using_sal (gdbarch, func_addr);
1433 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1434
1435 if (post_prologue_pc)
1436 post_prologue_pc
1437 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1438
1439
1440 /* GCC always emits a line note before the prologue and another
1441 one after, even if the two are at the same address or on the
1442 same line. Take advantage of this so that we do not need to
1443 know every instruction that might appear in the prologue. We
1444 will have producer information for most binaries; if it is
1445 missing (e.g. for -gstabs), assume the GNU tools. */
1446 if (post_prologue_pc
1447 && (cust == NULL
1448 || cust->producer () == NULL
1449 || startswith (cust->producer (), "GNU ")
1450 || producer_is_llvm (cust->producer ())))
1451 return post_prologue_pc;
1452
1453 if (post_prologue_pc != 0)
1454 {
1455 CORE_ADDR analyzed_limit;
1456
1457 /* For non-GCC compilers, make sure the entire line is an
1458 acceptable prologue; GDB will round this function's
1459 return value up to the end of the following line so we
1460 can not skip just part of a line (and we do not want to).
1461
1462 RealView does not treat the prologue specially, but does
1463 associate prologue code with the opening brace; so this
1464 lets us skip the first line if we think it is the opening
1465 brace. */
1466 if (arm_pc_is_thumb (gdbarch, func_addr))
1467 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1468 post_prologue_pc, NULL);
1469 else
1470 analyzed_limit
1471 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1472 NULL, target_arm_instruction_reader ());
1473
1474 if (analyzed_limit != post_prologue_pc)
1475 return func_addr;
1476
1477 return post_prologue_pc;
1478 }
1479 }
1480
1481 /* Can't determine prologue from the symbol table, need to examine
1482 instructions. */
1483
1484 /* Find an upper limit on the function prologue using the debug
1485 information. If the debug information could not be used to provide
1486 that bound, then use an arbitrary large number as the upper bound. */
1487 /* Like arm_scan_prologue, stop no later than pc + 64. */
1488 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1489 if (limit_pc == 0)
1490 limit_pc = pc + 64; /* Magic. */
1491
1492
1493 /* Check if this is Thumb code. */
1494 if (arm_pc_is_thumb (gdbarch, pc))
1495 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1496 else
1497 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1498 target_arm_instruction_reader ());
1499 }
1500
1501 /* *INDENT-OFF* */
1502 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1503 This function decodes a Thumb function prologue to determine:
1504 1) the size of the stack frame
1505 2) which registers are saved on it
1506 3) the offsets of saved regs
1507 4) the offset from the stack pointer to the frame pointer
1508
1509 A typical Thumb function prologue would create this stack frame
1510 (offsets relative to FP)
1511 old SP -> 24 stack parameters
1512 20 LR
1513 16 R7
1514 R7 -> 0 local variables (16 bytes)
1515 SP -> -12 additional stack space (12 bytes)
1516 The frame size would thus be 36 bytes, and the frame offset would be
1517 12 bytes. The frame register is R7.
1518
1519 The comments for thumb_analyze_prologue() describe the algorithm we use
1520 to detect the end of the prologue. */
1521 /* *INDENT-ON* */
1522
1523 static void
1524 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1525 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1526 {
1527 CORE_ADDR prologue_start;
1528 CORE_ADDR prologue_end;
1529
1530 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1531 &prologue_end))
1532 {
1533 /* See comment in arm_scan_prologue for an explanation of
1534 this heuristic. */
1535 if (prologue_end > prologue_start + 64)
1536 {
1537 prologue_end = prologue_start + 64;
1538 }
1539 }
1540 else
1541 /* We're in the boondocks: we have no idea where the start of the
1542 function is. */
1543 return;
1544
1545 prologue_end = std::min (prologue_end, prev_pc);
1546
1547 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1548 }
1549
1550 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1551 otherwise. */
1552
1553 static int
1554 arm_instruction_restores_sp (unsigned int insn)
1555 {
1556 if (bits (insn, 28, 31) != INST_NV)
1557 {
1558 if ((insn & 0x0df0f000) == 0x0080d000
1559 /* ADD SP (register or immediate). */
1560 || (insn & 0x0df0f000) == 0x0040d000
1561 /* SUB SP (register or immediate). */
1562 || (insn & 0x0ffffff0) == 0x01a0d000
1563 /* MOV SP. */
1564 || (insn & 0x0fff0000) == 0x08bd0000
1565 /* POP (LDMIA). */
1566 || (insn & 0x0fff0000) == 0x049d0000)
1567 /* POP of a single register. */
1568 return 1;
1569 }
1570
1571 return 0;
1572 }
1573
1574 /* Implement immediate value decoding, as described in section A5.2.4
1575 (Modified immediate constants in ARM instructions) of the ARM Architecture
1576 Reference Manual (ARMv7-A and ARMv7-R edition). */
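/* As a worked illustration (values chosen here, not taken from the original
   comment): imm = 0x2ff has a rotate field of 2, so 0xff is rotated right by
   4 bits, giving 0xf000000f; imm = 0x0ff has no rotation and yields 0xff. */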
1577
1578 static uint32_t
1579 arm_expand_immediate (uint32_t imm)
1580 {
1581 /* Immediate values are 12 bits long. */
1582 gdb_assert ((imm & 0xfffff000) == 0);
1583
1584 uint32_t unrotated_value = imm & 0xff;
1585 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1586
1587 if (rotate_amount == 0)
1588 return unrotated_value;
1589
1590 return ((unrotated_value >> rotate_amount)
1591 | (unrotated_value << (32 - rotate_amount)));
1592 }
1593
1594 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1595 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1596 fill it in. Return the first address not recognized as a prologue
1597 instruction.
1598
1599 We recognize all the instructions typically found in ARM prologues,
1600 plus harmless instructions which can be skipped (either for analysis
1601 purposes, or a more restrictive set that can be skipped when finding
1602 the end of the prologue). */
1603
1604 static CORE_ADDR
1605 arm_analyze_prologue (struct gdbarch *gdbarch,
1606 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1607 struct arm_prologue_cache *cache,
1608 const arm_instruction_reader &insn_reader)
1609 {
1610 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1611 int regno;
1612 CORE_ADDR offset, current_pc;
1613 pv_t regs[ARM_FPS_REGNUM];
1614 CORE_ADDR unrecognized_pc = 0;
1615 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1616
1617 /* Search the prologue looking for instructions that set up the
1618 frame pointer, adjust the stack pointer, and save registers.
1619
1620 Be careful, however, and if it doesn't look like a prologue,
1621 don't try to scan it. If, for instance, a frameless function
1622 begins with stmfd sp!, then we will tell ourselves there is
1623 a frame, which will confuse stack traceback, as well as "finish"
1624 and other operations that rely on a knowledge of the stack
1625 traceback. */
1626
1627 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1628 regs[regno] = pv_register (regno, 0);
1629 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1630
1631 for (current_pc = prologue_start;
1632 current_pc < prologue_end;
1633 current_pc += 4)
1634 {
1635 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1636
1637 if (insn == 0xe1a0c00d) /* mov ip, sp */
1638 {
1639 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1640 continue;
1641 }
1642 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1643 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1644 {
1645 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1646 int rd = bits (insn, 12, 15);
1647 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1648 continue;
1649 }
1650 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1651 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1652 {
1653 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1654 int rd = bits (insn, 12, 15);
1655 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1656 continue;
1657 }
1658 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1659 [sp, #-4]! */
1660 {
1661 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1662 break;
1663 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1664 stack.store (regs[ARM_SP_REGNUM], 4,
1665 regs[bits (insn, 12, 15)]);
1666 continue;
1667 }
1668 else if ((insn & 0xffff0000) == 0xe92d0000)
1669 /* stmfd sp!, {..., fp, ip, lr, pc}
1670 or
1671 stmfd sp!, {a1, a2, a3, a4} */
1672 {
1673 int mask = insn & 0xffff;
1674
1675 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1676 break;
1677
1678 /* Calculate offsets of saved registers. */
1679 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1680 if (mask & (1 << regno))
1681 {
1682 regs[ARM_SP_REGNUM]
1683 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1684 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1685 }
1686 }
1687 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1688 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1689 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1690 {
1691 /* No need to add this to saved_regs -- it's just an arg reg. */
1692 continue;
1693 }
1694 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1695 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1696 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1697 {
1698 /* No need to add this to saved_regs -- it's just an arg reg. */
1699 continue;
1700 }
1701 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1702 { registers } */
1703 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1704 {
1705 /* No need to add this to saved_regs -- it's just arg regs. */
1706 continue;
1707 }
1708 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1709 {
1710 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1711 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1712 }
1713 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1714 {
1715 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1716 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1717 }
1718 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1719 [sp, -#c]! */
1720 && tdep->have_fpa_registers)
1721 {
1722 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1723 break;
1724
1725 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1726 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1727 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1728 }
1729 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1730 [sp!] */
1731 && tdep->have_fpa_registers)
1732 {
1733 int n_saved_fp_regs;
1734 unsigned int fp_start_reg, fp_bound_reg;
1735
1736 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1737 break;
1738
1739 if ((insn & 0x800) == 0x800) /* N0 is set */
1740 {
1741 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1742 n_saved_fp_regs = 3;
1743 else
1744 n_saved_fp_regs = 1;
1745 }
1746 else
1747 {
1748 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1749 n_saved_fp_regs = 2;
1750 else
1751 n_saved_fp_regs = 4;
1752 }
1753
1754 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1755 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1756 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1757 {
1758 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1759 stack.store (regs[ARM_SP_REGNUM], 12,
1760 regs[fp_start_reg]);
1761 }
1762 }
1763 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1764 {
1765 /* Allow some special function calls when skipping the
1766 prologue; GCC generates these before storing arguments to
1767 the stack. */
1768 CORE_ADDR dest = BranchDest (current_pc, insn);
1769
1770 if (skip_prologue_function (gdbarch, dest, 0))
1771 continue;
1772 else
1773 break;
1774 }
1775 else if ((insn & 0xf0000000) != 0xe0000000)
1776 break; /* Condition not true, exit early. */
1777 else if (arm_instruction_changes_pc (insn))
1778 /* Don't scan past anything that might change control flow. */
1779 break;
1780 else if (arm_instruction_restores_sp (insn))
1781 {
1782 /* Don't scan past the epilogue. */
1783 break;
1784 }
1785 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1786 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1787 /* Ignore block loads from the stack, potentially copying
1788 parameters from memory. */
1789 continue;
1790 else if ((insn & 0xfc500000) == 0xe4100000
1791 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1792 /* Similarly ignore single loads from the stack. */
1793 continue;
1794 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1795 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1796 register instead of the stack. */
1797 continue;
1798 else
1799 {
1800 /* The optimizer might shove anything into the prologue.  If we
1801 are building up the cache (cache != NULL) from scanning the
1802 prologue, just skip what we don't recognize and keep scanning
1803 to make the cache as complete as possible.  However, if we are
1804 only skipping the prologue, stop immediately at the first
1805 unrecognized instruction. */
1806 unrecognized_pc = current_pc;
1807 if (cache != NULL)
1808 continue;
1809 else
1810 break;
1811 }
1812 }
1813
1814 if (unrecognized_pc == 0)
1815 unrecognized_pc = current_pc;
1816
1817 if (cache)
1818 {
1819 int framereg, framesize;
1820
1821 /* The frame size is just the distance from the frame register
1822 to the original stack pointer. */
1823 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1824 {
1825 /* Frame pointer is fp. */
1826 framereg = ARM_FP_REGNUM;
1827 framesize = -regs[ARM_FP_REGNUM].k;
1828 }
1829 else
1830 {
1831 /* Try the stack pointer... this is a bit desperate. */
1832 framereg = ARM_SP_REGNUM;
1833 framesize = -regs[ARM_SP_REGNUM].k;
1834 }
1835
1836 cache->framereg = framereg;
1837 cache->framesize = framesize;
1838
1839 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1840 if (stack.find_reg (gdbarch, regno, &offset))
1841 cache->saved_regs[regno].set_addr (offset);
1842 }
1843
1844 arm_debug_printf ("Prologue scan stopped at %s",
1845 paddress (gdbarch, unrecognized_pc));
1846
1847 return unrecognized_pc;
1848 }
1849
1850 static void
1851 arm_scan_prologue (struct frame_info *this_frame,
1852 struct arm_prologue_cache *cache)
1853 {
1854 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1855 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1856 CORE_ADDR prologue_start, prologue_end;
1857 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1858 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1859 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1860
1861 /* Assume there is no frame until proven otherwise. */
1862 cache->framereg = ARM_SP_REGNUM;
1863 cache->framesize = 0;
1864
1865 /* Check for Thumb prologue. */
1866 if (arm_frame_is_thumb (this_frame))
1867 {
1868 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1869 return;
1870 }
1871
1872 /* Find the function prologue. If we can't find the function in
1873 the symbol table, peek in the stack frame to find the PC. */
1874 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1875 &prologue_end))
1876 {
1877 /* One way to find the end of the prologue (which works well
1878 for unoptimized code) is to do the following:
1879
1880 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1881
1882 if (sal.line == 0)
1883 prologue_end = prev_pc;
1884 else if (sal.end < prologue_end)
1885 prologue_end = sal.end;
1886
1887 This mechanism is very accurate so long as the optimizer
1888 doesn't move any instructions from the function body into the
1889 prologue. If this happens, sal.end will be the last
1890 instruction in the first hunk of prologue code just before
1891 the first instruction that the scheduler has moved from
1892 the body to the prologue.
1893
1894 In order to make sure that we scan all of the prologue
1895 instructions, we use a slightly less accurate mechanism which
1896 may scan more than necessary. To help compensate for this
1897 lack of accuracy, the prologue scanning loop below contains
1898 several clauses which'll cause the loop to terminate early if
1899 an implausible prologue instruction is encountered.
1900
1901 The expression
1902
1903 prologue_start + 64
1904
1905 is a suitable endpoint since it accounts for the largest
1906 possible prologue plus up to five instructions inserted by
1907 the scheduler. */
1908
1909 if (prologue_end > prologue_start + 64)
1910 {
1911 prologue_end = prologue_start + 64; /* See above. */
1912 }
1913 }
1914 else
1915 {
1916 /* We have no symbol information. Our only option is to assume this
1917 function has a standard stack frame and the normal frame register.
1918 Then, we can find the value of our frame pointer on entrance to
1919 the callee (or at the present moment if this is the innermost frame).
1920 The value stored there should be the address of the stmfd + 8. */
1921 CORE_ADDR frame_loc;
1922 ULONGEST return_value;
1923
1924 /* AAPCS does not use a frame register, so we can abort here. */
1925 if (tdep->arm_abi == ARM_ABI_AAPCS)
1926 return;
1927
1928 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1929 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1930 &return_value))
1931 return;
1932 else
1933 {
1934 prologue_start = gdbarch_addr_bits_remove
1935 (gdbarch, return_value) - 8;
1936 prologue_end = prologue_start + 64; /* See above. */
1937 }
1938 }
1939
1940 if (prev_pc < prologue_end)
1941 prologue_end = prev_pc;
1942
1943 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
1944 target_arm_instruction_reader ());
1945 }
1946
1947 static struct arm_prologue_cache *
1948 arm_make_prologue_cache (struct frame_info *this_frame)
1949 {
1950 int reg;
1951 struct arm_prologue_cache *cache;
1952 CORE_ADDR unwound_fp;
1953
1954 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1955 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1956
1957 arm_scan_prologue (this_frame, cache);
1958
1959 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1960 if (unwound_fp == 0)
1961 return cache;
1962
1963 cache->prev_sp = unwound_fp + cache->framesize;
1964
1965 /* Calculate actual addresses of saved registers using offsets
1966 determined by arm_scan_prologue. */
1967 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1968 if (cache->saved_regs[reg].is_addr ())
1969 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
1970 + cache->prev_sp);
1971
1972 return cache;
1973 }
1974
1975 /* Implementation of the stop_reason hook for arm_prologue frames. */
1976
1977 static enum unwind_stop_reason
1978 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1979 void **this_cache)
1980 {
1981 struct arm_prologue_cache *cache;
1982 CORE_ADDR pc;
1983
1984 if (*this_cache == NULL)
1985 *this_cache = arm_make_prologue_cache (this_frame);
1986 cache = (struct arm_prologue_cache *) *this_cache;
1987
1988 /* This is meant to halt the backtrace at "_start". */
1989 pc = get_frame_pc (this_frame);
1990 gdbarch *arch = get_frame_arch (this_frame);
1991 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (arch);
1992 if (pc <= tdep->lowest_pc)
1993 return UNWIND_OUTERMOST;
1994
1995 /* If we've hit a wall, stop. */
1996 if (cache->prev_sp == 0)
1997 return UNWIND_OUTERMOST;
1998
1999 return UNWIND_NO_REASON;
2000 }
2001
2002 /* Our frame ID for a normal frame is the current function's starting PC
2003 and the caller's SP when we were called. */
2004
2005 static void
2006 arm_prologue_this_id (struct frame_info *this_frame,
2007 void **this_cache,
2008 struct frame_id *this_id)
2009 {
2010 struct arm_prologue_cache *cache;
2011 struct frame_id id;
2012 CORE_ADDR pc, func;
2013
2014 if (*this_cache == NULL)
2015 *this_cache = arm_make_prologue_cache (this_frame);
2016 cache = (struct arm_prologue_cache *) *this_cache;
2017
2018 /* Use function start address as part of the frame ID. If we cannot
2019 identify the start address (due to missing symbol information),
2020 fall back to just using the current PC. */
2021 pc = get_frame_pc (this_frame);
2022 func = get_frame_func (this_frame);
2023 if (!func)
2024 func = pc;
2025
2026 id = frame_id_build (cache->prev_sp, func);
2027 *this_id = id;
2028 }
2029
2030 static struct value *
2031 arm_prologue_prev_register (struct frame_info *this_frame,
2032 void **this_cache,
2033 int prev_regnum)
2034 {
2035 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2036 struct arm_prologue_cache *cache;
2037
2038 if (*this_cache == NULL)
2039 *this_cache = arm_make_prologue_cache (this_frame);
2040 cache = (struct arm_prologue_cache *) *this_cache;
2041
2042 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
2043
2044 /* If this frame has signed the return address, mark it as so. */
2045 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2046 && *cache->ra_signed_state)
2047 set_frame_previous_pc_masked (this_frame);
2048
2049 /* If we are asked to unwind the PC, then we need to return the LR
2050 instead. The prologue may save PC, but it will point into this
2051 frame's prologue, not the next frame's resume location. Also
2052 strip the saved T bit. A valid LR may have the low bit set, but
2053 a valid PC never does. */
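  /* For example, a saved LR of 0x80001235 (Thumb bit set) unwinds to a
     PC value of 0x80001234 once arm_addr_bits_remove strips the low bit
     (illustrative addresses).  */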
2054 if (prev_regnum == ARM_PC_REGNUM)
2055 {
2056 CORE_ADDR lr;
2057
2058 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2059 return frame_unwind_got_constant (this_frame, prev_regnum,
2060 arm_addr_bits_remove (gdbarch, lr));
2061 }
2062
2063 /* SP is generally not saved to the stack, but this frame is
2064 identified by the next frame's stack pointer at the time of the call.
2065 The value was already reconstructed into PREV_SP. */
2066 if (prev_regnum == ARM_SP_REGNUM)
2067 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2068
2069 /* The CPSR may have been changed by the call instruction and by the
2070 called function. The only bit we can reconstruct is the T bit,
2071 by checking the low bit of LR as of the call. This is a reliable
2072 indicator of Thumb-ness except for some ARM v4T pre-interworking
2073 Thumb code, which could get away with a clear low bit as long as
2074 the called function did not use bx. Guess that all other
2075 bits are unchanged; the condition flags are presumably lost,
2076 but the processor status is likely valid. */
2077 if (prev_regnum == ARM_PS_REGNUM)
2078 {
2079 CORE_ADDR lr, cpsr;
2080 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2081
2082 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2083 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2084 if (IS_THUMB_ADDR (lr))
2085 cpsr |= t_bit;
2086 else
2087 cpsr &= ~t_bit;
2088 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2089 }
2090
2091 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2092 prev_regnum);
2093 }
2094
2095 static frame_unwind arm_prologue_unwind = {
2096 "arm prologue",
2097 NORMAL_FRAME,
2098 arm_prologue_unwind_stop_reason,
2099 arm_prologue_this_id,
2100 arm_prologue_prev_register,
2101 NULL,
2102 default_frame_sniffer
2103 };
2104
2105 /* Maintain a list of ARM exception table entries per objfile, similar to the
2106 list of mapping symbols. We only cache entries for standard ARM-defined
2107 personality routines; the cache will contain only the frame unwinding
2108 instructions associated with the entry (not the descriptors). */
2109
2110 struct arm_exidx_entry
2111 {
2112 CORE_ADDR addr;
2113 gdb_byte *entry;
2114
2115 bool operator< (const arm_exidx_entry &other) const
2116 {
2117 return addr < other.addr;
2118 }
2119 };
2120
2121 struct arm_exidx_data
2122 {
2123 std::vector<std::vector<arm_exidx_entry>> section_maps;
2124 };
2125
2126 /* Per-BFD key to store exception handling information. */
2127 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2128
2129 static struct obj_section *
2130 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2131 {
2132 struct obj_section *osect;
2133
2134 ALL_OBJFILE_OSECTIONS (objfile, osect)
2135 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2136 {
2137 bfd_vma start, size;
2138 start = bfd_section_vma (osect->the_bfd_section);
2139 size = bfd_section_size (osect->the_bfd_section);
2140
2141 if (start <= vma && vma < start + size)
2142 return osect;
2143 }
2144
2145 return NULL;
2146 }
2147
2148 /* Parse contents of exception table and exception index sections
2149 of OBJFILE, and fill in the exception table entry cache.
2150
2151 For each entry that refers to a standard ARM-defined personality
2152 routine, extract the frame unwinding instructions (from either
2153 the index or the table section). The unwinding instructions
2154 are normalized by:
2155 - extracting them from the rest of the table data
2156 - converting to host endianness
2157 - appending the implicit 0xb0 ("Finish") code
2158
2159 The extracted and normalized instructions are stored for later
2160 retrieval by the arm_find_exidx_entry routine. */
2161
2162 static void
2163 arm_exidx_new_objfile (struct objfile *objfile)
2164 {
2165 struct arm_exidx_data *data;
2166 asection *exidx, *extab;
2167 bfd_vma exidx_vma = 0, extab_vma = 0;
2168 LONGEST i;
2169
2170 /* If we've already touched this file, do nothing. */
2171 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2172 return;
2173
2174 /* Read contents of exception table and index. */
2175 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2176 gdb::byte_vector exidx_data;
2177 if (exidx)
2178 {
2179 exidx_vma = bfd_section_vma (exidx);
2180 exidx_data.resize (bfd_section_size (exidx));
2181
2182 if (!bfd_get_section_contents (objfile->obfd, exidx,
2183 exidx_data.data (), 0,
2184 exidx_data.size ()))
2185 return;
2186 }
2187
2188 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2189 gdb::byte_vector extab_data;
2190 if (extab)
2191 {
2192 extab_vma = bfd_section_vma (extab);
2193 extab_data.resize (bfd_section_size (extab));
2194
2195 if (!bfd_get_section_contents (objfile->obfd, extab,
2196 extab_data.data (), 0,
2197 extab_data.size ()))
2198 return;
2199 }
2200
2201 /* Allocate exception table data structure. */
2202 data = arm_exidx_data_key.emplace (objfile->obfd);
2203 data->section_maps.resize (objfile->obfd->section_count);
2204
2205 /* Fill in exception table. */
2206 for (i = 0; i < exidx_data.size () / 8; i++)
2207 {
2208 struct arm_exidx_entry new_exidx_entry;
2209 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2210 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2211 exidx_data.data () + i * 8 + 4);
2212 bfd_vma addr = 0, word = 0;
2213 int n_bytes = 0, n_words = 0;
2214 struct obj_section *sec;
2215 gdb_byte *entry = NULL;
2216
2217 /* Extract address of start of function. */
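	 /* The following XOR/subtract pair sign-extends the 31-bit
	    place-relative ("prel31") offset held in the low bits of the
	    word: e.g. an encoded 0x7ffffffc becomes -4, while 0x00000010
	    stays 16 (illustrative values).  The offset is relative to
	    the address of the index word itself, hence the addition
	    below.  */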
2218 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2219 idx += exidx_vma + i * 8;
2220
2221 /* Find section containing function and compute section offset. */
2222 sec = arm_obj_section_from_vma (objfile, idx);
2223 if (sec == NULL)
2224 continue;
2225 idx -= bfd_section_vma (sec->the_bfd_section);
2226
2227 /* Determine address of exception table entry. */
2228 if (val == 1)
2229 {
2230 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2231 }
2232 else if ((val & 0xff000000) == 0x80000000)
2233 {
2234 /* Exception table entry embedded in .ARM.exidx
2235 -- must be short form. */
2236 word = val;
2237 n_bytes = 3;
2238 }
2239 else if (!(val & 0x80000000))
2240 {
2241 /* Exception table entry in .ARM.extab. */
2242 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2243 addr += exidx_vma + i * 8 + 4;
2244
2245 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2246 {
2247 word = bfd_h_get_32 (objfile->obfd,
2248 extab_data.data () + addr - extab_vma);
2249 addr += 4;
2250
2251 if ((word & 0xff000000) == 0x80000000)
2252 {
2253 /* Short form. */
2254 n_bytes = 3;
2255 }
2256 else if ((word & 0xff000000) == 0x81000000
2257 || (word & 0xff000000) == 0x82000000)
2258 {
2259 /* Long form. */
2260 n_bytes = 2;
2261 n_words = ((word >> 16) & 0xff);
2262 }
2263 else if (!(word & 0x80000000))
2264 {
2265 bfd_vma pers;
2266 struct obj_section *pers_sec;
2267 int gnu_personality = 0;
2268
2269 /* Custom personality routine. */
2270 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2271 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2272
2273 /* Check whether we've got one of the variants of the
2274 GNU personality routines. */
2275 pers_sec = arm_obj_section_from_vma (objfile, pers);
2276 if (pers_sec)
2277 {
2278 static const char *personality[] =
2279 {
2280 "__gcc_personality_v0",
2281 "__gxx_personality_v0",
2282 "__gcj_personality_v0",
2283 "__gnu_objc_personality_v0",
2284 NULL
2285 };
2286
2287 CORE_ADDR pc = pers + pers_sec->offset ();
2288 int k;
2289
2290 for (k = 0; personality[k]; k++)
2291 if (lookup_minimal_symbol_by_pc_name
2292 (pc, personality[k], objfile))
2293 {
2294 gnu_personality = 1;
2295 break;
2296 }
2297 }
2298
2299 /* If so, the next word contains a word count in the high
2300 byte, followed by the same unwind instructions as the
2301 pre-defined forms. */
2302 if (gnu_personality
2303 && addr + 4 <= extab_vma + extab_data.size ())
2304 {
2305 word = bfd_h_get_32 (objfile->obfd,
2306 (extab_data.data ()
2307 + addr - extab_vma));
2308 addr += 4;
2309 n_bytes = 3;
2310 n_words = ((word >> 24) & 0xff);
2311 }
2312 }
2313 }
2314 }
2315
2316 /* Sanity check address. */
2317 if (n_words)
2318 if (addr < extab_vma
2319 || addr + 4 * n_words > extab_vma + extab_data.size ())
2320 n_words = n_bytes = 0;
2321
2322 /* The unwind instructions reside in WORD (only the N_BYTES least
2323 significant bytes are valid), followed by N_WORDS words in the
2324 extab section starting at ADDR. */
2325 if (n_bytes || n_words)
2326 {
2327 gdb_byte *p = entry
2328 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2329 n_bytes + n_words * 4 + 1);
2330
2331 while (n_bytes--)
2332 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2333
2334 while (n_words--)
2335 {
2336 word = bfd_h_get_32 (objfile->obfd,
2337 extab_data.data () + addr - extab_vma);
2338 addr += 4;
2339
2340 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2341 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2342 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2343 *p++ = (gdb_byte) (word & 0xff);
2344 }
2345
2346 /* Implied "Finish" to terminate the list. */
2347 *p++ = 0xb0;
2348 }
2349
2350 /* Push the entry onto the vector.  Entries are guaranteed to
2351 always appear in order of increasing addresses. */
2352 new_exidx_entry.addr = idx;
2353 new_exidx_entry.entry = entry;
2354 data->section_maps[sec->the_bfd_section->index].push_back
2355 (new_exidx_entry);
2356 }
2357 }
2358
2359 /* Search for the exception table entry covering MEMADDR. If one is found,
2360 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2361 set *START to the start of the region covered by this entry. */
2362
2363 static gdb_byte *
2364 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2365 {
2366 struct obj_section *sec;
2367
2368 sec = find_pc_section (memaddr);
2369 if (sec != NULL)
2370 {
2371 struct arm_exidx_data *data;
2372 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2373
2374 data = arm_exidx_data_key.get (sec->objfile->obfd);
2375 if (data != NULL)
2376 {
2377 std::vector<arm_exidx_entry> &map
2378 = data->section_maps[sec->the_bfd_section->index];
2379 if (!map.empty ())
2380 {
2381 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2382
2383 /* std::lower_bound finds the earliest ordered insertion
2384 point. If the following symbol starts at this exact
2385 address, we use that; otherwise, the preceding
2386 exception table entry covers this address. */
2387 if (idx < map.end ())
2388 {
2389 if (idx->addr == map_key.addr)
2390 {
2391 if (start)
2392 *start = idx->addr + sec->addr ();
2393 return idx->entry;
2394 }
2395 }
2396
2397 if (idx > map.begin ())
2398 {
2399 idx = idx - 1;
2400 if (start)
2401 *start = idx->addr + sec->addr ();
2402 return idx->entry;
2403 }
2404 }
2405 }
2406 }
2407
2408 return NULL;
2409 }
2410
2411 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2412 instruction list from the ARM exception table entry ENTRY, allocate and
2413 return a prologue cache structure describing how to unwind this frame.
2414
2415 Return NULL if the unwinding instruction list contains a "spare",
2416 "reserved" or "refuse to unwind" instruction as defined in section
2417 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2418 for the ARM Architecture" document. */
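/* As an illustrative example (not taken from any particular binary),
   the normalized byte sequence 0x04 0x84 0x0b 0xb0 decodes as:
   "vsp += 0x14" (4 * 4 + 4), "pop {r4, r5, r7, r14} under mask 0x40b",
   and "finish"; since PC was not popped, the return address is then
   taken from the popped LR.  */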
2419
2420 static struct arm_prologue_cache *
2421 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2422 {
2423 CORE_ADDR vsp = 0;
2424 int vsp_valid = 0;
2425
2426 struct arm_prologue_cache *cache;
2427 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2428 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2429
2430 for (;;)
2431 {
2432 gdb_byte insn;
2433
2434 /* Whenever we reload SP, we have to retrieve its actual value
2435 in the current frame. */
2436 if (!vsp_valid)
2437 {
2438 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2439 {
2440 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2441 vsp = get_frame_register_unsigned (this_frame, reg);
2442 }
2443 else
2444 {
2445 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2446 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2447 }
2448
2449 vsp_valid = 1;
2450 }
2451
2452 /* Decode next unwind instruction. */
2453 insn = *entry++;
2454
2455 if ((insn & 0xc0) == 0)
2456 {
2457 int offset = insn & 0x3f;
2458 vsp += (offset << 2) + 4;
2459 }
2460 else if ((insn & 0xc0) == 0x40)
2461 {
2462 int offset = insn & 0x3f;
2463 vsp -= (offset << 2) + 4;
2464 }
2465 else if ((insn & 0xf0) == 0x80)
2466 {
2467 int mask = ((insn & 0xf) << 8) | *entry++;
2468 int i;
2469
2470 /* The special case of an all-zero mask identifies
2471 "Refuse to unwind". We return NULL to fall back
2472 to the prologue analyzer. */
2473 if (mask == 0)
2474 return NULL;
2475
2476 /* Pop registers r4..r15 under mask. */
2477 for (i = 0; i < 12; i++)
2478 if (mask & (1 << i))
2479 {
2480 cache->saved_regs[4 + i].set_addr (vsp);
2481 vsp += 4;
2482 }
2483
2484 /* Special-case popping SP -- we need to reload vsp. */
2485 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2486 vsp_valid = 0;
2487 }
2488 else if ((insn & 0xf0) == 0x90)
2489 {
2490 int reg = insn & 0xf;
2491
2492 /* Reserved cases. */
2493 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2494 return NULL;
2495
2496 /* Set SP from another register and mark VSP for reload. */
2497 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2498 vsp_valid = 0;
2499 }
2500 else if ((insn & 0xf0) == 0xa0)
2501 {
2502 int count = insn & 0x7;
2503 int pop_lr = (insn & 0x8) != 0;
2504 int i;
2505
2506 /* Pop r4..r[4+count]. */
2507 for (i = 0; i <= count; i++)
2508 {
2509 cache->saved_regs[4 + i].set_addr (vsp);
2510 vsp += 4;
2511 }
2512
2513 /* If indicated by flag, pop LR as well. */
2514 if (pop_lr)
2515 {
2516 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2517 vsp += 4;
2518 }
2519 }
2520 else if (insn == 0xb0)
2521 {
2522 /* We could only have updated PC by popping into it; if so, it
2523 will show up as an address.  Otherwise, copy LR into PC. */
2524 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2525 cache->saved_regs[ARM_PC_REGNUM]
2526 = cache->saved_regs[ARM_LR_REGNUM];
2527
2528 /* We're done. */
2529 break;
2530 }
2531 else if (insn == 0xb1)
2532 {
2533 int mask = *entry++;
2534 int i;
2535
2536 /* All-zero mask and mask >= 16 is "spare". */
2537 if (mask == 0 || mask >= 16)
2538 return NULL;
2539
2540 /* Pop r0..r3 under mask. */
2541 for (i = 0; i < 4; i++)
2542 if (mask & (1 << i))
2543 {
2544 cache->saved_regs[i].set_addr (vsp);
2545 vsp += 4;
2546 }
2547 }
2548 else if (insn == 0xb2)
2549 {
2550 ULONGEST offset = 0;
2551 unsigned shift = 0;
2552
2553 do
2554 {
2555 offset |= (*entry & 0x7f) << shift;
2556 shift += 7;
2557 }
2558 while (*entry++ & 0x80);
2559
2560 vsp += 0x204 + (offset << 2);
2561 }
2562 else if (insn == 0xb3)
2563 {
2564 int start = *entry >> 4;
2565 int count = (*entry++) & 0xf;
2566 int i;
2567
2568 /* Only registers D0..D15 are valid here. */
2569 if (start + count >= 16)
2570 return NULL;
2571
2572 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2573 for (i = 0; i <= count; i++)
2574 {
2575 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2576 vsp += 8;
2577 }
2578
2579 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2580 vsp += 4;
2581 }
2582 else if ((insn & 0xf8) == 0xb8)
2583 {
2584 int count = insn & 0x7;
2585 int i;
2586
2587 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2588 for (i = 0; i <= count; i++)
2589 {
2590 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2591 vsp += 8;
2592 }
2593
2594 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2595 vsp += 4;
2596 }
2597 else if (insn == 0xc6)
2598 {
2599 int start = *entry >> 4;
2600 int count = (*entry++) & 0xf;
2601 int i;
2602
2603 /* Only registers WR0..WR15 are valid. */
2604 if (start + count >= 16)
2605 return NULL;
2606
2607 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2608 for (i = 0; i <= count; i++)
2609 {
2610 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2611 vsp += 8;
2612 }
2613 }
2614 else if (insn == 0xc7)
2615 {
2616 int mask = *entry++;
2617 int i;
2618
2619 /* All-zero mask and mask >= 16 is "spare". */
2620 if (mask == 0 || mask >= 16)
2621 return NULL;
2622
2623 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2624 for (i = 0; i < 4; i++)
2625 if (mask & (1 << i))
2626 {
2627 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2628 vsp += 4;
2629 }
2630 }
2631 else if ((insn & 0xf8) == 0xc0)
2632 {
2633 int count = insn & 0x7;
2634 int i;
2635
2636 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2637 for (i = 0; i <= count; i++)
2638 {
2639 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2640 vsp += 8;
2641 }
2642 }
2643 else if (insn == 0xc8)
2644 {
2645 int start = *entry >> 4;
2646 int count = (*entry++) & 0xf;
2647 int i;
2648
2649 /* Only registers D0..D31 are valid. */
2650 if (start + count >= 16)
2651 return NULL;
2652
2653 /* Pop VFP double-precision registers
2654 D[16+start]..D[16+start+count]. */
2655 for (i = 0; i <= count; i++)
2656 {
2657 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2658 vsp += 8;
2659 }
2660 }
2661 else if (insn == 0xc9)
2662 {
2663 int start = *entry >> 4;
2664 int count = (*entry++) & 0xf;
2665 int i;
2666
2667 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2668 for (i = 0; i <= count; i++)
2669 {
2670 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2671 vsp += 8;
2672 }
2673 }
2674 else if ((insn & 0xf8) == 0xd0)
2675 {
2676 int count = insn & 0x7;
2677 int i;
2678
2679 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2680 for (i = 0; i <= count; i++)
2681 {
2682 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2683 vsp += 8;
2684 }
2685 }
2686 else
2687 {
2688 /* Everything else is "spare". */
2689 return NULL;
2690 }
2691 }
2692
2693 /* If we restore SP from a register, assume this was the frame register.
2694 Otherwise just fall back to SP as frame register. */
2695 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2696 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2697 else
2698 cache->framereg = ARM_SP_REGNUM;
2699
2700 /* Determine offset to previous frame. */
2701 cache->framesize
2702 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2703
2704 /* We already got the previous SP. */
2705 cache->prev_sp = vsp;
2706
2707 return cache;
2708 }
2709
2710 /* Unwinding via ARM exception table entries. Note that the sniffer
2711 already computes a filled-in prologue cache, which is then used
2712 with the same arm_prologue_this_id and arm_prologue_prev_register
2713 routines also used for prologue-parsing based unwinding. */
2714
2715 static int
2716 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2717 struct frame_info *this_frame,
2718 void **this_prologue_cache)
2719 {
2720 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2721 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2722 CORE_ADDR addr_in_block, exidx_region, func_start;
2723 struct arm_prologue_cache *cache;
2724 gdb_byte *entry;
2725
2726 /* See if we have an ARM exception table entry covering this address. */
2727 addr_in_block = get_frame_address_in_block (this_frame);
2728 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2729 if (!entry)
2730 return 0;
2731
2732 /* The ARM exception table does not describe unwind information
2733 for arbitrary PC values, but is guaranteed to be correct only
2734 at call sites. We have to decide here whether we want to use
2735 ARM exception table information for this frame, or fall back
2736 to using prologue parsing. (Note that if we have DWARF CFI,
2737 this sniffer isn't even called -- CFI is always preferred.)
2738
2739 Before we make this decision, however, we check whether we
2740 actually have *symbol* information for the current frame.
2741 If not, prologue parsing would not work anyway, so we might
2742 as well use the exception table and hope for the best. */
2743 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2744 {
2745 int exc_valid = 0;
2746
2747 /* If the next frame is "normal", we are at a call site in this
2748 frame, so exception information is guaranteed to be valid. */
2749 if (get_next_frame (this_frame)
2750 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2751 exc_valid = 1;
2752
2753 /* We also assume exception information is valid if we're currently
2754 blocked in a system call. The system library is supposed to
2755 ensure this, so that e.g. pthread cancellation works. */
2756 if (arm_frame_is_thumb (this_frame))
2757 {
2758 ULONGEST insn;
2759
2760 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2761 2, byte_order_for_code, &insn)
2762 && (insn & 0xff00) == 0xdf00 /* svc */)
2763 exc_valid = 1;
2764 }
2765 else
2766 {
2767 ULONGEST insn;
2768
2769 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2770 4, byte_order_for_code, &insn)
2771 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2772 exc_valid = 1;
2773 }
2774
2775 /* Bail out if we don't know that exception information is valid. */
2776 if (!exc_valid)
2777 return 0;
2778
2779 /* The ARM exception index does not mark the *end* of the region
2780 covered by the entry, and some functions will not have any entry.
2781 To correctly recognize the end of the covered region, the linker
2782 should have inserted dummy records with a CANTUNWIND marker.
2783
2784 Unfortunately, current versions of GNU ld do not reliably do
2785 this, and thus we may have found an incorrect entry above.
2786 As a (temporary) sanity check, we only use the entry if it
2787 lies *within* the bounds of the function. Note that this check
2788 might reject perfectly valid entries that just happen to cover
2789 multiple functions; therefore this check ought to be removed
2790 once the linker is fixed. */
2791 if (func_start > exidx_region)
2792 return 0;
2793 }
2794
2795 /* Decode the list of unwinding instructions into a prologue cache.
2796 Note that this may fail due to e.g. a "refuse to unwind" code. */
2797 cache = arm_exidx_fill_cache (this_frame, entry);
2798 if (!cache)
2799 return 0;
2800
2801 *this_prologue_cache = cache;
2802 return 1;
2803 }
2804
2805 struct frame_unwind arm_exidx_unwind = {
2806 "arm exidx",
2807 NORMAL_FRAME,
2808 default_frame_unwind_stop_reason,
2809 arm_prologue_this_id,
2810 arm_prologue_prev_register,
2811 NULL,
2812 arm_exidx_unwind_sniffer
2813 };
2814
2815 static struct arm_prologue_cache *
2816 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2817 {
2818 struct arm_prologue_cache *cache;
2819 int reg;
2820
2821 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2822 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2823
2824 /* Still rely on the offset calculated from prologue. */
2825 arm_scan_prologue (this_frame, cache);
2826
2827 /* Since we are in epilogue, the SP has been restored. */
2828 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2829
2830 /* Calculate actual addresses of saved registers using offsets
2831 determined by arm_scan_prologue. */
2832 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2833 if (cache->saved_regs[reg].is_addr ())
2834 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2835 + cache->prev_sp);
2836
2837 return cache;
2838 }
2839
2840 /* Implementation of function hook 'this_id' in
2841 'struct frame_unwind' for epilogue unwinder. */
2842
2843 static void
2844 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2845 void **this_cache,
2846 struct frame_id *this_id)
2847 {
2848 struct arm_prologue_cache *cache;
2849 CORE_ADDR pc, func;
2850
2851 if (*this_cache == NULL)
2852 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2853 cache = (struct arm_prologue_cache *) *this_cache;
2854
2855 /* Use function start address as part of the frame ID. If we cannot
2856 identify the start address (due to missing symbol information),
2857 fall back to just using the current PC. */
2858 pc = get_frame_pc (this_frame);
2859 func = get_frame_func (this_frame);
2860 if (func == 0)
2861 func = pc;
2862
2863 (*this_id) = frame_id_build (cache->prev_sp, func);
2864 }
2865
2866 /* Implementation of function hook 'prev_register' in
2867 'struct frame_unwind' for epilogue unwinder. */
2868
2869 static struct value *
2870 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2871 void **this_cache, int regnum)
2872 {
2873 if (*this_cache == NULL)
2874 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2875
2876 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2877 }
2878
2879 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2880 CORE_ADDR pc);
2881 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2882 CORE_ADDR pc);
2883
2884 /* Implementation of function hook 'sniffer' in
2885 'struct frame_unwind' for epilogue unwinder. */
2886
2887 static int
2888 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2889 struct frame_info *this_frame,
2890 void **this_prologue_cache)
2891 {
2892 if (frame_relative_level (this_frame) == 0)
2893 {
2894 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2895 CORE_ADDR pc = get_frame_pc (this_frame);
2896
2897 if (arm_frame_is_thumb (this_frame))
2898 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2899 else
2900 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2901 }
2902 else
2903 return 0;
2904 }
2905
2906 /* Frame unwinder from epilogue. */
2907
2908 static const struct frame_unwind arm_epilogue_frame_unwind =
2909 {
2910 "arm epilogue",
2911 NORMAL_FRAME,
2912 default_frame_unwind_stop_reason,
2913 arm_epilogue_frame_this_id,
2914 arm_epilogue_frame_prev_register,
2915 NULL,
2916 arm_epilogue_frame_sniffer,
2917 };
2918
2919 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2920 trampoline, return the target PC. Otherwise return 0.
2921
2922 void call0a (char c, short s, int i, long l) {}
2923
2924 int main (void)
2925 {
2926 (*pointer_to_call0a) (c, s, i, l);
2927 }
2928
2929 Instead of calling a stub library function _call_via_xx (xx is
2930 the register name), GCC may inline the trampoline in the object
2931 file as below (register r2 has the address of call0a).
2932
2933 .global main
2934 .type main, %function
2935 ...
2936 bl .L1
2937 ...
2938 .size main, .-main
2939
2940 .L1:
2941 bx r2
2942
2943 The trampoline 'bx r2' doesn't belong to main. */
2944
2945 static CORE_ADDR
2946 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2947 {
2948 /* The heuristic for recognizing such a trampoline is that FRAME is
2949 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2950 if (arm_frame_is_thumb (frame))
2951 {
2952 gdb_byte buf[2];
2953
2954 if (target_read_memory (pc, buf, 2) == 0)
2955 {
2956 struct gdbarch *gdbarch = get_frame_arch (frame);
2957 enum bfd_endian byte_order_for_code
2958 = gdbarch_byte_order_for_code (gdbarch);
2959 uint16_t insn
2960 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2961
2962 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2963 {
2964 CORE_ADDR dest
2965 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2966
2967 /* Clear the LSB so that gdb core sets step-resume
2968 breakpoint at the right address. */
2969 return UNMAKE_THUMB_ADDR (dest);
2970 }
2971 }
2972 }
2973
2974 return 0;
2975 }
2976
2977 static struct arm_prologue_cache *
2978 arm_make_stub_cache (struct frame_info *this_frame)
2979 {
2980 struct arm_prologue_cache *cache;
2981
2982 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2983 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2984
2985 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2986
2987 return cache;
2988 }
2989
2990 /* Our frame ID for a stub frame is the current SP and LR. */
2991
2992 static void
2993 arm_stub_this_id (struct frame_info *this_frame,
2994 void **this_cache,
2995 struct frame_id *this_id)
2996 {
2997 struct arm_prologue_cache *cache;
2998
2999 if (*this_cache == NULL)
3000 *this_cache = arm_make_stub_cache (this_frame);
3001 cache = (struct arm_prologue_cache *) *this_cache;
3002
3003 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
3004 }
3005
3006 static int
3007 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3008 struct frame_info *this_frame,
3009 void **this_prologue_cache)
3010 {
3011 CORE_ADDR addr_in_block;
3012 gdb_byte dummy[4];
3013 CORE_ADDR pc, start_addr;
3014 const char *name;
3015
3016 addr_in_block = get_frame_address_in_block (this_frame);
3017 pc = get_frame_pc (this_frame);
3018 if (in_plt_section (addr_in_block)
3019 /* We also use the stub unwinder if the target memory is unreadable
3020 to avoid having the prologue unwinder trying to read it. */
3021 || target_read_memory (pc, dummy, 4) != 0)
3022 return 1;
3023
3024 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3025 && arm_skip_bx_reg (this_frame, pc) != 0)
3026 return 1;
3027
3028 return 0;
3029 }
3030
3031 struct frame_unwind arm_stub_unwind = {
3032 "arm stub",
3033 NORMAL_FRAME,
3034 default_frame_unwind_stop_reason,
3035 arm_stub_this_id,
3036 arm_prologue_prev_register,
3037 NULL,
3038 arm_stub_unwind_sniffer
3039 };
3040
3041 /* Store, into CACHE->saved_regs, the addresses of the registers
3042 saved by the frame described by THIS_FRAME.  CACHE is
3043 returned. */
3044
3045 static struct arm_prologue_cache *
3046 arm_m_exception_cache (struct frame_info *this_frame)
3047 {
3048 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3049 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3050 struct arm_prologue_cache *cache;
3051 CORE_ADDR lr;
3052 CORE_ADDR sp;
3053 CORE_ADDR unwound_sp;
3054 LONGEST xpsr;
3055 uint32_t exc_return;
3056 uint32_t process_stack_used;
3057 uint32_t extended_frame_used;
3058 uint32_t secure_stack_used;
3059
3060 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3061 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3062
3063 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3064 describes which bits in LR define which stack was used prior to
3065 the exception and whether the FPU was in use (causing an extended stack frame). */
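  /* As a sketch (ARMv7-M values, not exhaustive): an EXC_RETURN of
     0xfffffff9 means Main stack and a standard (non-FPU) frame, while
     0xffffffed means Thread (process) stack and an extended frame with
     FPU registers stacked.  */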
3066
3067 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3068 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3069
3070 /* Check EXC_RETURN indicator bits. */
3071 exc_return = (((lr >> 28) & 0xf) == 0xf);
3072
3073 /* Check the EXC_RETURN SPSEL bit to see whether the Main or Thread (process) stack was used. */
3074 process_stack_used = ((lr & (1 << 2)) != 0);
3075 if (exc_return && process_stack_used)
3076 {
3077 /* Thread (process) stack was used.
3078 Potentially this could be some other register defined by the target,
3079 but PSP can be considered a standard name for the "Process Stack
3080 Pointer".  To be fully aware of system registers like MSP and PSP,
3081 these could be added to a separate XML arm-m-system-profile valid for
3082 the ARMv6-M and ARMv7-M architectures.  Also, to be able to debug
3083 e.g. a corefile off-line, these registers must be defined by GDB
3084 and included in the corefile regsets. */
3085
3086 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
3087 if (psp_regnum == -1)
3088 {
3089 /* Thread (process) stack could not be fetched,
3090 give warning and exit. */
3091
3092 warning (_("no PSP thread stack unwinding supported."));
3093
3094 /* Terminate any further stack unwinding by refer to self. */
3095 cache->prev_sp = sp;
3096 return cache;
3097 }
3098 else
3099 {
3100 /* Thread (process) stack used, use PSP as SP. */
3101 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
3102 }
3103 }
3104 else
3105 {
3106 /* Main stack used, use MSP as SP. */
3107 unwound_sp = sp;
3108 }
3109
3110 /* The hardware saves eight 32-bit words, comprising xPSR,
3111 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3112 "B1.5.6 Exception entry behavior" in
3113 "ARMv7-M Architecture Reference Manual". */
3114 cache->saved_regs[0].set_addr (unwound_sp);
3115 cache->saved_regs[1].set_addr (unwound_sp + 4);
3116 cache->saved_regs[2].set_addr (unwound_sp + 8);
3117 cache->saved_regs[3].set_addr (unwound_sp + 12);
3118 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + 16);
3119 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 20);
3120 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 24);
3121 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 28);
3122
3123 /* Check the EXC_RETURN FTYPE bit to see whether an extended stack
3124 frame (with FPU registers stored) was used. */
3125 extended_frame_used = ((lr & (1 << 4)) == 0);
3126 if (exc_return && extended_frame_used)
3127 {
3128 int i;
3129 int fpu_regs_stack_offset;
3130
3131 /* This code does not take lazy stacking into account; see "Lazy
3132 context save of FP state" in B1.5.7, and also ARM AN298, as
3133 supported by the Cortex-M4F architecture.
3134 To handle this fully, the FPCCR register (Floating-point Context
3135 Control Register) would need to be read out, and its ASPEN and LSPEN
3136 bits checked in order to set up the lazily stacked FP registers
3137 correctly.  This register is located at address 0xE000EF34. */
3138
3139 /* Extended stack frame type used. */
3140 fpu_regs_stack_offset = unwound_sp + 0x20;
3141 for (i = 0; i < 16; i++)
3142 {
3143 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3144 fpu_regs_stack_offset += 4;
3145 }
3146 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + 0x60);
3147
3148 /* Offset 0x64 is reserved. */
3149 cache->prev_sp = unwound_sp + 0x68;
3150 }
3151 else
3152 {
3153 /* Standard stack frame type used. */
3154 cache->prev_sp = unwound_sp + 0x20;
3155 }
3156
3157 /* Check the EXC_RETURN S bit to see whether the Secure or Non-secure stack was used. */
3158 secure_stack_used = ((lr & (1 << 6)) != 0);
3159 if (exc_return && secure_stack_used)
3160 {
3161 /* ARMv8-M exception and interrupt handling is not considered here.
3162 In the ARMv8-M architecture, the EXC_RETURN S bit also controls
3163 whether the Secure or Non-secure stack was used.  To separate the
3164 Secure and Non-secure stacks, processors based on the ARMv8-M
3165 architecture support four stack pointers: MSP_S, PSP_S, MSP_NS and
3166 PSP_NS.  In addition, a stack limit feature is provided through
3167 stack limit registers (accessible using the MSR and MRS instructions)
3168 at the Privileged level. */
3169 }
3170
3171 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3172 aligner between the top of the 32-byte stack frame and the
3173 previous context's stack pointer. */
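  /* For instance, with a standard frame the previous SP moves from
     unwound_sp + 0x20 to unwound_sp + 0x24 when the stacked xPSR has
     bit 9 set (illustrative of the adjustment below).  */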
3174 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3175 && (xpsr & (1 << 9)) != 0)
3176 cache->prev_sp += 4;
3177
3178 return cache;
3179 }
3180
3181 /* Implementation of function hook 'this_id' in
3182 'struct frame_unwind'. */
3183
3184 static void
3185 arm_m_exception_this_id (struct frame_info *this_frame,
3186 void **this_cache,
3187 struct frame_id *this_id)
3188 {
3189 struct arm_prologue_cache *cache;
3190
3191 if (*this_cache == NULL)
3192 *this_cache = arm_m_exception_cache (this_frame);
3193 cache = (struct arm_prologue_cache *) *this_cache;
3194
3195 /* Our frame ID for an exception frame is the unwound SP and the current PC. */
3196 *this_id = frame_id_build (cache->prev_sp,
3197 get_frame_pc (this_frame));
3198 }
3199
3200 /* Implementation of function hook 'prev_register' in
3201 'struct frame_unwind'. */
3202
3203 static struct value *
3204 arm_m_exception_prev_register (struct frame_info *this_frame,
3205 void **this_cache,
3206 int prev_regnum)
3207 {
3208 struct arm_prologue_cache *cache;
3209
3210 if (*this_cache == NULL)
3211 *this_cache = arm_m_exception_cache (this_frame);
3212 cache = (struct arm_prologue_cache *) *this_cache;
3213
3214 /* The value was already reconstructed into PREV_SP. */
3215 if (prev_regnum == ARM_SP_REGNUM)
3216 return frame_unwind_got_constant (this_frame, prev_regnum,
3217 cache->prev_sp);
3218
3219 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3220 prev_regnum);
3221 }
3222
3223 /* Implementation of function hook 'sniffer' in
3224 'struct frame_unwind'. */
3225
3226 static int
3227 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3228 struct frame_info *this_frame,
3229 void **this_prologue_cache)
3230 {
3231 CORE_ADDR this_pc = get_frame_pc (this_frame);
3232
3233 /* No need to check is_m; this sniffer is only registered for
3234 M-profile architectures. */
3235
3236 /* Check if exception frame returns to a magic PC value. */
3237 return arm_m_addr_is_magic (this_pc);
3238 }
3239
3240 /* Frame unwinder for M-profile exceptions. */
3241
3242 struct frame_unwind arm_m_exception_unwind =
3243 {
3244 "arm m exception",
3245 SIGTRAMP_FRAME,
3246 default_frame_unwind_stop_reason,
3247 arm_m_exception_this_id,
3248 arm_m_exception_prev_register,
3249 NULL,
3250 arm_m_exception_unwind_sniffer
3251 };
3252
3253 static CORE_ADDR
3254 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3255 {
3256 struct arm_prologue_cache *cache;
3257
3258 if (*this_cache == NULL)
3259 *this_cache = arm_make_prologue_cache (this_frame);
3260 cache = (struct arm_prologue_cache *) *this_cache;
3261
3262 return cache->prev_sp - cache->framesize;
3263 }
3264
3265 struct frame_base arm_normal_base = {
3266 &arm_prologue_unwind,
3267 arm_normal_frame_base,
3268 arm_normal_frame_base,
3269 arm_normal_frame_base
3270 };
3271
3272 static struct value *
3273 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3274 int regnum)
3275 {
3276 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3277 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3278 CORE_ADDR lr, cpsr;
3279 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3280
3281 switch (regnum)
3282 {
3283 case ARM_PC_REGNUM:
3284 /* The PC is normally copied from the return column, which
3285 describes saves of LR. However, that version may have an
3286 extra bit set to indicate Thumb state. The bit is not
3287 part of the PC. */
3288
3289 /* Record in the frame whether the return address was signed. */
3290 if (tdep->have_pacbti)
3291 {
3292 CORE_ADDR ra_auth_code
3293 = frame_unwind_register_unsigned (this_frame,
3294 tdep->pacbti_pseudo_base);
3295
3296 if (ra_auth_code != 0)
3297 set_frame_previous_pc_masked (this_frame);
3298 }
3299
3300 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3301 return frame_unwind_got_constant (this_frame, regnum,
3302 arm_addr_bits_remove (gdbarch, lr));
3303
3304 case ARM_PS_REGNUM:
3305 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3306 cpsr = get_frame_register_unsigned (this_frame, regnum);
3307 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3308 if (IS_THUMB_ADDR (lr))
3309 cpsr |= t_bit;
3310 else
3311 cpsr &= ~t_bit;
3312 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3313
3314 default:
3315 internal_error (__FILE__, __LINE__,
3316 _("Unexpected register %d"), regnum);
3317 }
3318 }
3319
3320 /* Implement the stack_frame_destroyed_p gdbarch method. */
3321
3322 static int
3323 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3324 {
3325 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3326 unsigned int insn, insn2;
3327 int found_return = 0, found_stack_adjust = 0;
3328 CORE_ADDR func_start, func_end;
3329 CORE_ADDR scan_pc;
3330 gdb_byte buf[4];
3331
3332 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3333 return 0;
3334
3335 /* The epilogue is a sequence of instructions along the following lines:
3336
3337 - add stack frame size to SP or FP
3338 - [if frame pointer used] restore SP from FP
3339 - restore registers from SP [may include PC]
3340 - a return-type instruction [if PC wasn't already restored]
3341
3342 In a first pass, we scan forward from the current PC and verify the
3343 instructions we find as compatible with this sequence, ending in a
3344 return instruction.
3345
3346 However, this is not sufficient to distinguish indirect function calls
3347 within a function from indirect tail calls in the epilogue in some cases.
3348 Therefore, if we didn't already find any SP-changing instruction during
3349 forward scan, we add a backward scanning heuristic to ensure we actually
3350 are in the epilogue. */
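  /* A typical Thumb epilogue this recognizes might look like (a sketch,
     not from any particular compiler output):
	  add	sp, sp, #16
	  pop	{r4, r5, r7, pc}
     where the "pop" both restores SP and returns.  */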
3351
3352 scan_pc = pc;
3353 while (scan_pc < func_end && !found_return)
3354 {
3355 if (target_read_memory (scan_pc, buf, 2))
3356 break;
3357
3358 scan_pc += 2;
3359 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3360
3361 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3362 found_return = 1;
3363 else if (insn == 0x46f7) /* mov pc, lr */
3364 found_return = 1;
3365 else if (thumb_instruction_restores_sp (insn))
3366 {
3367 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3368 found_return = 1;
3369 }
3370 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3371 {
3372 if (target_read_memory (scan_pc, buf, 2))
3373 break;
3374
3375 scan_pc += 2;
3376 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3377
3378 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3379 {
3380 if (insn2 & 0x8000) /* <registers> include PC. */
3381 found_return = 1;
3382 }
3383 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3384 && (insn2 & 0x0fff) == 0x0b04)
3385 {
3386 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3387 found_return = 1;
3388 }
3389 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3390 && (insn2 & 0x0e00) == 0x0a00)
3391 ;
3392 else
3393 break;
3394 }
3395 else
3396 break;
3397 }
3398
3399 if (!found_return)
3400 return 0;
3401
3402 /* Since any instruction in the epilogue sequence, with the possible
3403 exception of return itself, updates the stack pointer, we need to
3404 scan backwards for at most one instruction. Try either a 16-bit or
3405 a 32-bit instruction. This is just a heuristic, so we do not worry
3406 too much about false positives. */
3407
3408 if (pc - 4 < func_start)
3409 return 0;
3410 if (target_read_memory (pc - 4, buf, 4))
3411 return 0;
3412
3413 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3414 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3415
3416 if (thumb_instruction_restores_sp (insn2))
3417 found_stack_adjust = 1;
3418 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3419 found_stack_adjust = 1;
3420 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3421 && (insn2 & 0x0fff) == 0x0b04)
3422 found_stack_adjust = 1;
3423 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3424 && (insn2 & 0x0e00) == 0x0a00)
3425 found_stack_adjust = 1;
3426
3427 return found_stack_adjust;
3428 }
3429
3430 static int
3431 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3432 {
3433 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3434 unsigned int insn;
3435 int found_return;
3436 CORE_ADDR func_start, func_end;
3437
3438 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3439 return 0;
3440
3441 /* We are in the epilogue if the previous instruction was a stack
3442 adjustment and the next instruction is a possible return (bx, mov
3443 pc, or pop). We could have to scan backwards to find the stack
3444 adjustment, or forwards to find the return, but this is a decent
3445 approximation. First scan forwards. */
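  /* E.g. if PC points at the "ldmfd sp!, {r4, r11, pc}" of a sequence
     "add sp, sp, #8; ldmfd sp!, {r4, r11, pc}", the forward check sees
     a pop including PC and the backward check sees an SP adjustment, so
     the frame is considered destroyed (illustrative sequence).  */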
3446
3447 found_return = 0;
3448 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3449 if (bits (insn, 28, 31) != INST_NV)
3450 {
3451 if ((insn & 0x0ffffff0) == 0x012fff10)
3452 /* BX. */
3453 found_return = 1;
3454 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3455 /* MOV PC. */
3456 found_return = 1;
3457 else if ((insn & 0x0fff0000) == 0x08bd0000
3458 && (insn & 0x0000c000) != 0)
3459 /* POP (LDMIA), including PC or LR. */
3460 found_return = 1;
3461 }
3462
3463 if (!found_return)
3464 return 0;
3465
3466 /* Scan backwards. This is just a heuristic, so do not worry about
3467 false positives from mode changes. */
3468
3469 if (pc < func_start + 4)
3470 return 0;
3471
3472 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3473 if (arm_instruction_restores_sp (insn))
3474 return 1;
3475
3476 return 0;
3477 }
3478
3479 /* Implement the stack_frame_destroyed_p gdbarch method. */
3480
3481 static int
3482 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3483 {
3484 if (arm_pc_is_thumb (gdbarch, pc))
3485 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3486 else
3487 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3488 }
3489
3490 /* When arguments must be pushed onto the stack, they go on in reverse
3491 order. The code below implements a FILO (stack) to do this. */
3492
3493 struct stack_item
3494 {
3495 int len;
3496 struct stack_item *prev;
3497 gdb_byte *data;
3498 };
3499
3500 static struct stack_item *
3501 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3502 {
3503 struct stack_item *si;
3504 si = XNEW (struct stack_item);
3505 si->data = (gdb_byte *) xmalloc (len);
3506 si->len = len;
3507 si->prev = prev;
3508 memcpy (si->data, contents, len);
3509 return si;
3510 }
3511
3512 static struct stack_item *
3513 pop_stack_item (struct stack_item *si)
3514 {
3515 struct stack_item *dead = si;
3516 si = si->prev;
3517 xfree (dead->data);
3518 xfree (dead);
3519 return si;
3520 }
3521
3522 /* Implement the gdbarch type alignment method, overrides the generic
3523 alignment algorithm for anything that is arm specific. */
3524
3525 static ULONGEST
3526 arm_type_align (gdbarch *gdbarch, struct type *t)
3527 {
3528 t = check_typedef (t);
3529 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3530 {
3531 /* Use the natural alignment for vector types (the same as for the
3532 underlying scalar type), but cap the alignment at 64 bits. */
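      /* E.g. an 8-byte vector keeps its natural 8-byte alignment, while
	 a 16-byte vector is also aligned to only 8 bytes (illustrative
	 of the cap applied below).  */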
3533 if (TYPE_LENGTH (t) > 8)
3534 return 8;
3535 else
3536 return TYPE_LENGTH (t);
3537 }
3538
3539 /* Allow the common code to calculate the alignment. */
3540 return 0;
3541 }
3542
3543 /* Possible base types for a candidate for passing and returning in
3544 VFP registers. */
3545
3546 enum arm_vfp_cprc_base_type
3547 {
3548 VFP_CPRC_UNKNOWN,
3549 VFP_CPRC_SINGLE,
3550 VFP_CPRC_DOUBLE,
3551 VFP_CPRC_VEC64,
3552 VFP_CPRC_VEC128
3553 };
3554
3555 /* The length of one element of base type B. */
3556
3557 static unsigned
3558 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3559 {
3560 switch (b)
3561 {
3562 case VFP_CPRC_SINGLE:
3563 return 4;
3564 case VFP_CPRC_DOUBLE:
3565 return 8;
3566 case VFP_CPRC_VEC64:
3567 return 8;
3568 case VFP_CPRC_VEC128:
3569 return 16;
3570 default:
3571 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3572 (int) b);
3573 }
3574 }
3575
3576 /* The character ('s', 'd' or 'q') for the type of VFP register used
3577 for passing base type B. */
3578
3579 static int
3580 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3581 {
3582 switch (b)
3583 {
3584 case VFP_CPRC_SINGLE:
3585 return 's';
3586 case VFP_CPRC_DOUBLE:
3587 return 'd';
3588 case VFP_CPRC_VEC64:
3589 return 'd';
3590 case VFP_CPRC_VEC128:
3591 return 'q';
3592 default:
3593 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3594 (int) b);
3595 }
3596 }
3597
3598 /* Determine whether T may be part of a candidate for passing and
3599 returning in VFP registers, ignoring the limit on the total number
3600 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3601 classification of the first valid component found; if it is not
3602 VFP_CPRC_UNKNOWN, all components must have the same classification
3603 as *BASE_TYPE. If it is found that T contains a type not permitted
3604 for passing and returning in VFP registers, a type differently
3605 classified from *BASE_TYPE, or two types differently classified
3606 from each other, return -1, otherwise return the total number of
3607 base-type elements found (possibly 0 in an empty structure or
3608 array). Vector types are not currently supported, matching the
3609 generic AAPCS support. */
3610
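/* For example, by the rules above (illustrative):
   - struct { float x, y, z; } => 3 elements of VFP_CPRC_SINGLE;
   - complex double (16 bytes) => 2 elements of VFP_CPRC_DOUBLE;
   - struct { float f; double d; } => -1, since the members classify
     differently. */
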
3611 static int
3612 arm_vfp_cprc_sub_candidate (struct type *t,
3613 enum arm_vfp_cprc_base_type *base_type)
3614 {
3615 t = check_typedef (t);
3616 switch (t->code ())
3617 {
3618 case TYPE_CODE_FLT:
3619 switch (TYPE_LENGTH (t))
3620 {
3621 case 4:
3622 if (*base_type == VFP_CPRC_UNKNOWN)
3623 *base_type = VFP_CPRC_SINGLE;
3624 else if (*base_type != VFP_CPRC_SINGLE)
3625 return -1;
3626 return 1;
3627
3628 case 8:
3629 if (*base_type == VFP_CPRC_UNKNOWN)
3630 *base_type = VFP_CPRC_DOUBLE;
3631 else if (*base_type != VFP_CPRC_DOUBLE)
3632 return -1;
3633 return 1;
3634
3635 default:
3636 return -1;
3637 }
3638 break;
3639
3640 case TYPE_CODE_COMPLEX:
3641 /* Arguments of complex T where T is one of the types float or
3642 double get treated as if they are implemented as:
3643
3644 struct complexT
3645 {
3646 T real;
3647 T imag;
3648 };
3649
3650 */
3651 switch (TYPE_LENGTH (t))
3652 {
3653 case 8:
3654 if (*base_type == VFP_CPRC_UNKNOWN)
3655 *base_type = VFP_CPRC_SINGLE;
3656 else if (*base_type != VFP_CPRC_SINGLE)
3657 return -1;
3658 return 2;
3659
3660 case 16:
3661 if (*base_type == VFP_CPRC_UNKNOWN)
3662 *base_type = VFP_CPRC_DOUBLE;
3663 else if (*base_type != VFP_CPRC_DOUBLE)
3664 return -1;
3665 return 2;
3666
3667 default:
3668 return -1;
3669 }
3670 break;
3671
3672 case TYPE_CODE_ARRAY:
3673 {
3674 if (t->is_vector ())
3675 {
3676 /* 64-bit and 128-bit containerized vector types are VFP
3677 CPRCs. */
3678 switch (TYPE_LENGTH (t))
3679 {
3680 case 8:
3681 if (*base_type == VFP_CPRC_UNKNOWN)
3682 *base_type = VFP_CPRC_VEC64;
3683 return 1;
3684 case 16:
3685 if (*base_type == VFP_CPRC_UNKNOWN)
3686 *base_type = VFP_CPRC_VEC128;
3687 return 1;
3688 default:
3689 return -1;
3690 }
3691 }
3692 else
3693 {
3694 int count;
3695 unsigned unitlen;
3696
3697 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3698 base_type);
3699 if (count == -1)
3700 return -1;
3701 if (TYPE_LENGTH (t) == 0)
3702 {
3703 gdb_assert (count == 0);
3704 return 0;
3705 }
3706 else if (count == 0)
3707 return -1;
3708 unitlen = arm_vfp_cprc_unit_length (*base_type);
3709 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3710 return TYPE_LENGTH (t) / unitlen;
3711 }
3712 }
3713 break;
3714
3715 case TYPE_CODE_STRUCT:
3716 {
3717 int count = 0;
3718 unsigned unitlen;
3719 int i;
3720 for (i = 0; i < t->num_fields (); i++)
3721 {
3722 int sub_count = 0;
3723
3724 if (!field_is_static (&t->field (i)))
3725 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3726 base_type);
3727 if (sub_count == -1)
3728 return -1;
3729 count += sub_count;
3730 }
3731 if (TYPE_LENGTH (t) == 0)
3732 {
3733 gdb_assert (count == 0);
3734 return 0;
3735 }
3736 else if (count == 0)
3737 return -1;
3738 unitlen = arm_vfp_cprc_unit_length (*base_type);
3739 if (TYPE_LENGTH (t) != unitlen * count)
3740 return -1;
3741 return count;
3742 }
3743
3744 case TYPE_CODE_UNION:
3745 {
3746 int count = 0;
3747 unsigned unitlen;
3748 int i;
3749 for (i = 0; i < t->num_fields (); i++)
3750 {
3751 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3752 base_type);
3753 if (sub_count == -1)
3754 return -1;
3755 count = (count > sub_count ? count : sub_count);
3756 }
3757 if (TYPE_LENGTH (t) == 0)
3758 {
3759 gdb_assert (count == 0);
3760 return 0;
3761 }
3762 else if (count == 0)
3763 return -1;
3764 unitlen = arm_vfp_cprc_unit_length (*base_type);
3765 if (TYPE_LENGTH (t) != unitlen * count)
3766 return -1;
3767 return count;
3768 }
3769
3770 default:
3771 break;
3772 }
3773
3774 return -1;
3775 }
3776
3777 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3778 if passed to or returned from a non-variadic function with the VFP
3779 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3780 *BASE_TYPE to the base type for T and *COUNT to the number of
3781 elements of that base type before returning. */
3782
3783 static int
3784 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3785 int *count)
3786 {
3787 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3788 int c = arm_vfp_cprc_sub_candidate (t, &b);
3789 if (c <= 0 || c > 4)
3790 return 0;
3791 *base_type = b;
3792 *count = c;
3793 return 1;
3794 }
3795
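/* Illustration of the one-to-four element limit enforced just above: a
   structure of four doubles is a CPRC (it would occupy d0-d3 when passed),
   whereas a structure of five doubles is not and is passed according to
   the base AAPCS rules instead. */
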
3796 /* Return 1 if the VFP ABI should be used for passing arguments to and
3797 returning values from a function of type FUNC_TYPE, 0
3798 otherwise. */
3799
3800 static int
3801 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3802 {
3803 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3804
3805 /* Variadic functions always use the base ABI. Assume that functions
3806 without debug info are not variadic. */
3807 if (func_type && check_typedef (func_type)->has_varargs ())
3808 return 0;
3809
3810 /* The VFP ABI is only supported as a variant of AAPCS. */
3811 if (tdep->arm_abi != ARM_ABI_AAPCS)
3812 return 0;
3813
3814 return tdep->fp_model == ARM_FLOAT_VFP;
3815 }
3816
3817 /* We currently only support passing parameters in integer registers, which
3818 conforms with GCC's default model, and VFP argument passing following
3819 the VFP variant of AAPCS. Several other variants exist and
3820 we should probably support some of them based on the selected ABI. */
3821
3822 static CORE_ADDR
3823 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3824 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3825 struct value **args, CORE_ADDR sp,
3826 function_call_return_method return_method,
3827 CORE_ADDR struct_addr)
3828 {
3829 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3830 int argnum;
3831 int argreg;
3832 int nstack;
3833 struct stack_item *si = NULL;
3834 int use_vfp_abi;
3835 struct type *ftype;
3836 unsigned vfp_regs_free = (1 << 16) - 1;
3837 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3838
3839 /* Determine the type of this function and whether the VFP ABI
3840 applies. */
3841 ftype = check_typedef (value_type (function));
3842 if (ftype->code () == TYPE_CODE_PTR)
3843 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3844 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3845
3846 /* Set the return address. For the ARM, the return breakpoint is
3847 always at BP_ADDR. */
3848 if (arm_pc_is_thumb (gdbarch, bp_addr))
3849 bp_addr |= 1;
3850 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3851
3852 /* Walk through the list of args and determine how large a temporary
3853 stack is required. Need to take care here as structs may be
3854 passed on the stack, and we have to push them. */
3855 nstack = 0;
3856
3857 argreg = ARM_A1_REGNUM;
3858 nstack = 0;
3859
3860 /* The struct_return pointer occupies the first parameter
3861 passing register. */
3862 if (return_method == return_method_struct)
3863 {
3864 arm_debug_printf ("struct return in %s = %s",
3865 gdbarch_register_name (gdbarch, argreg),
3866 paddress (gdbarch, struct_addr));
3867
3868 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3869 argreg++;
3870 }
3871
3872 for (argnum = 0; argnum < nargs; argnum++)
3873 {
3874 int len;
3875 struct type *arg_type;
3876 struct type *target_type;
3877 enum type_code typecode;
3878 const bfd_byte *val;
3879 int align;
3880 enum arm_vfp_cprc_base_type vfp_base_type;
3881 int vfp_base_count;
3882 int may_use_core_reg = 1;
3883
3884 arg_type = check_typedef (value_type (args[argnum]));
3885 len = TYPE_LENGTH (arg_type);
3886 target_type = TYPE_TARGET_TYPE (arg_type);
3887 typecode = arg_type->code ();
3888 val = value_contents (args[argnum]).data ();
3889
3890 align = type_align (arg_type);
3891 /* Round alignment up to a whole number of words. */
3892 align = (align + ARM_INT_REGISTER_SIZE - 1)
3893 & ~(ARM_INT_REGISTER_SIZE - 1);
3894 /* Different ABIs have different maximum alignments. */
3895 if (tdep->arm_abi == ARM_ABI_APCS)
3896 {
3897 /* The APCS ABI only requires word alignment. */
3898 align = ARM_INT_REGISTER_SIZE;
3899 }
3900 else
3901 {
3902 /* The AAPCS requires at most doubleword alignment. */
3903 if (align > ARM_INT_REGISTER_SIZE * 2)
3904 align = ARM_INT_REGISTER_SIZE * 2;
3905 }
3906
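/* For example, under AAPCS a "long long" or "double" argument has an
   alignment of 8 here, so below it is placed in an even/odd core register
   pair (r0/r1 or r2/r3) or at an 8-byte-aligned stack slot; under APCS the
   alignment is forced to a single word. */
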
3907 if (use_vfp_abi
3908 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3909 &vfp_base_count))
3910 {
3911 int regno;
3912 int unit_length;
3913 int shift;
3914 unsigned mask;
3915
3916 /* Because this is a CPRC it cannot go in a core register or
3917 cause a core register to be skipped for alignment.
3918 Either it goes in VFP registers and the rest of this loop
3919 iteration is skipped for this argument, or it goes on the
3920 stack (and the stack alignment code is correct for this
3921 case). */
3922 may_use_core_reg = 0;
3923
3924 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3925 shift = unit_length / 4;
3926 mask = (1 << (shift * vfp_base_count)) - 1;
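/* For example, for a CPRC of two doubles (VFP_CPRC_DOUBLE, count 2):
   unit_length is 8, shift is 2 and mask is 0xf, so the loop below looks
   for four consecutive free S registers starting at an even S-register
   number, i.e. a free pair of D registers. */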
3927 for (regno = 0; regno < 16; regno += shift)
3928 if (((vfp_regs_free >> regno) & mask) == mask)
3929 break;
3930
3931 if (regno < 16)
3932 {
3933 int reg_char;
3934 int reg_scaled;
3935 int i;
3936
3937 vfp_regs_free &= ~(mask << regno);
3938 reg_scaled = regno / shift;
3939 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3940 for (i = 0; i < vfp_base_count; i++)
3941 {
3942 char name_buf[4];
3943 int regnum;
3944 if (reg_char == 'q')
3945 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3946 val + i * unit_length);
3947 else
3948 {
3949 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3950 reg_char, reg_scaled + i);
3951 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3952 strlen (name_buf));
3953 regcache->cooked_write (regnum, val + i * unit_length);
3954 }
3955 }
3956 continue;
3957 }
3958 else
3959 {
3960 /* This CPRC could not go in VFP registers, so all VFP
3961 registers are now marked as used. */
3962 vfp_regs_free = 0;
3963 }
3964 }
3965
3966 /* Push stack padding for doubleword alignment. */
3967 if (nstack & (align - 1))
3968 {
3969 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3970 nstack += ARM_INT_REGISTER_SIZE;
3971 }
3972
3973 /* Doubleword aligned quantities must go in even register pairs. */
3974 if (may_use_core_reg
3975 && argreg <= ARM_LAST_ARG_REGNUM
3976 && align > ARM_INT_REGISTER_SIZE
3977 && argreg & 1)
3978 argreg++;
3979
3980 /* If the argument is a pointer to a function, and it is a
3981 Thumb function, create a LOCAL copy of the value and set
3982 the THUMB bit in it. */
3983 if (TYPE_CODE_PTR == typecode
3984 && target_type != NULL
3985 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3986 {
3987 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3988 if (arm_pc_is_thumb (gdbarch, regval))
3989 {
3990 bfd_byte *copy = (bfd_byte *) alloca (len);
3991 store_unsigned_integer (copy, len, byte_order,
3992 MAKE_THUMB_ADDR (regval));
3993 val = copy;
3994 }
3995 }
3996
3997 /* Copy the argument to general registers or the stack in
3998 register-sized pieces. Large arguments are split between
3999 registers and stack. */
4000 while (len > 0)
4001 {
4002 int partial_len = len < ARM_INT_REGISTER_SIZE
4003 ? len : ARM_INT_REGISTER_SIZE;
4004 CORE_ADDR regval
4005 = extract_unsigned_integer (val, partial_len, byte_order);
4006
4007 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4008 {
4009 /* The argument is being passed in a general purpose
4010 register. */
4011 if (byte_order == BFD_ENDIAN_BIG)
4012 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4013
4014 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4015 gdbarch_register_name (gdbarch, argreg),
4016 phex (regval, ARM_INT_REGISTER_SIZE));
4017
4018 regcache_cooked_write_unsigned (regcache, argreg, regval);
4019 argreg++;
4020 }
4021 else
4022 {
4023 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4024
4025 memset (buf, 0, sizeof (buf));
4026 store_unsigned_integer (buf, partial_len, byte_order, regval);
4027
4028 /* Push the arguments onto the stack. */
4029 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4030 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4031 nstack += ARM_INT_REGISTER_SIZE;
4032 }
4033
4034 len -= partial_len;
4035 val += partial_len;
4036 }
4037 }
4038 /* If we have an odd number of words to push, then decrement the stack
4039 by one word now, so the first stack argument will be dword aligned. */
4040 if (nstack & 4)
4041 sp -= 4;
4042
4043 while (si)
4044 {
4045 sp -= si->len;
4046 write_memory (sp, si->data, si->len);
4047 si = pop_stack_item (si);
4048 }
4049
4050 /* Finally, update the SP register. */
4051 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4052
4053 return sp;
4054 }
4055
4056
4057 /* Always align the frame to an 8-byte boundary. This is required on
4058 some platforms and harmless on the rest. */
4059
4060 static CORE_ADDR
4061 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4062 {
4063 /* Align the stack to eight bytes. */
4064 return sp & ~ (CORE_ADDR) 7;
4065 }
4066
4067 static void
4068 print_fpu_flags (struct ui_file *file, int flags)
4069 {
4070 if (flags & (1 << 0))
4071 gdb_puts ("IVO ", file);
4072 if (flags & (1 << 1))
4073 gdb_puts ("DVZ ", file);
4074 if (flags & (1 << 2))
4075 gdb_puts ("OFL ", file);
4076 if (flags & (1 << 3))
4077 gdb_puts ("UFL ", file);
4078 if (flags & (1 << 4))
4079 gdb_puts ("INX ", file);
4080 gdb_putc ('\n', file);
4081 }
4082
4083 /* Print interesting information about the floating point processor
4084 (if present) or emulator. */
4085 static void
4086 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4087 struct frame_info *frame, const char *args)
4088 {
4089 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4090 int type;
4091
4092 type = (status >> 24) & 127;
4093 if (status & (1 << 31))
4094 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4095 else
4096 gdb_printf (file, _("Software FPU type %d\n"), type);
4097 /* i18n: [floating point unit] mask */
4098 gdb_puts (_("mask: "), file);
4099 print_fpu_flags (file, status >> 16);
4100 /* i18n: [floating point unit] flags */
4101 gdb_puts (_("flags: "), file);
4102 print_fpu_flags (file, status);
4103 }
4104
4105 /* Construct the ARM extended floating point type. */
4106 static struct type *
4107 arm_ext_type (struct gdbarch *gdbarch)
4108 {
4109 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4110
4111 if (!tdep->arm_ext_type)
4112 tdep->arm_ext_type
4113 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4114 floatformats_arm_ext);
4115
4116 return tdep->arm_ext_type;
4117 }
4118
4119 static struct type *
4120 arm_neon_double_type (struct gdbarch *gdbarch)
4121 {
4122 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4123
4124 if (tdep->neon_double_type == NULL)
4125 {
4126 struct type *t, *elem;
4127
4128 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4129 TYPE_CODE_UNION);
4130 elem = builtin_type (gdbarch)->builtin_uint8;
4131 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4132 elem = builtin_type (gdbarch)->builtin_uint16;
4133 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4134 elem = builtin_type (gdbarch)->builtin_uint32;
4135 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4136 elem = builtin_type (gdbarch)->builtin_uint64;
4137 append_composite_type_field (t, "u64", elem);
4138 elem = builtin_type (gdbarch)->builtin_float;
4139 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4140 elem = builtin_type (gdbarch)->builtin_double;
4141 append_composite_type_field (t, "f64", elem);
4142
4143 t->set_is_vector (true);
4144 t->set_name ("neon_d");
4145 tdep->neon_double_type = t;
4146 }
4147
4148 return tdep->neon_double_type;
4149 }
4150
4151 /* FIXME: The vector types are not correctly ordered on big-endian
4152 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4153 bits of d0 - regardless of what unit size is being held in d0. So
4154 the offset of the first uint8 in d0 is 7, but the offset of the
4155 first float is 4. This code works as-is for little-endian
4156 targets. */
4157
4158 static struct type *
4159 arm_neon_quad_type (struct gdbarch *gdbarch)
4160 {
4161 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4162
4163 if (tdep->neon_quad_type == NULL)
4164 {
4165 struct type *t, *elem;
4166
4167 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4168 TYPE_CODE_UNION);
4169 elem = builtin_type (gdbarch)->builtin_uint8;
4170 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4171 elem = builtin_type (gdbarch)->builtin_uint16;
4172 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4173 elem = builtin_type (gdbarch)->builtin_uint32;
4174 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4175 elem = builtin_type (gdbarch)->builtin_uint64;
4176 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4177 elem = builtin_type (gdbarch)->builtin_float;
4178 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4179 elem = builtin_type (gdbarch)->builtin_double;
4180 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4181
4182 t->set_is_vector (true);
4183 t->set_name ("neon_q");
4184 tdep->neon_quad_type = t;
4185 }
4186
4187 return tdep->neon_quad_type;
4188 }
4189
4190 /* Return true if REGNUM is a Q pseudo register. Return false
4191 otherwise.
4192
4193 REGNUM is the raw register number and not a pseudo-relative register
4194 number. */
4195
4196 static bool
4197 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4198 {
4199 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4200
4201 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4202 MVE (Q0~Q7) features. */
4203 if (tdep->have_q_pseudos
4204 && regnum >= tdep->q_pseudo_base
4205 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4206 return true;
4207
4208 return false;
4209 }
4210
4211 /* Return true if REGNUM is a VFP S pseudo register. Return false
4212 otherwise.
4213
4214 REGNUM is the raw register number and not a pseudo-relative register
4215 number. */
4216
4217 static bool
4218 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4219 {
4220 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4221
4222 if (tdep->have_s_pseudos
4223 && regnum >= tdep->s_pseudo_base
4224 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4225 return true;
4226
4227 return false;
4228 }
4229
4230 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4231 otherwise.
4232
4233 REGNUM is the raw register number and not a pseudo-relative register
4234 number. */
4235
4236 static bool
4237 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
4238 {
4239 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4240
4241 if (tdep->have_mve
4242 && regnum >= tdep->mve_pseudo_base
4243 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
4244 return true;
4245
4246 return false;
4247 }
4248
4249 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
4250 false otherwise.
4251
4252 REGNUM is the raw register number and not a pseudo-relative register
4253 number. */
4254
4255 static bool
4256 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
4257 {
4258 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4259
4260 if (tdep->have_pacbti
4261 && regnum >= tdep->pacbti_pseudo_base
4262 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
4263 return true;
4264
4265 return false;
4266 }
4267
4268 /* Return the GDB type object for the "standard" data type of data in
4269 register N. */
4270
4271 static struct type *
4272 arm_register_type (struct gdbarch *gdbarch, int regnum)
4273 {
4274 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4275
4276 if (is_s_pseudo (gdbarch, regnum))
4277 return builtin_type (gdbarch)->builtin_float;
4278
4279 if (is_q_pseudo (gdbarch, regnum))
4280 return arm_neon_quad_type (gdbarch);
4281
4282 if (is_mve_pseudo (gdbarch, regnum))
4283 return builtin_type (gdbarch)->builtin_int16;
4284
4285 if (is_pacbti_pseudo (gdbarch, regnum))
4286 return builtin_type (gdbarch)->builtin_uint32;
4287
4288 /* If the target description has register information, we are only
4289 in this function so that we can override the types of
4290 double-precision registers for NEON. */
4291 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4292 {
4293 struct type *t = tdesc_register_type (gdbarch, regnum);
4294
4295 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4296 && t->code () == TYPE_CODE_FLT
4297 && tdep->have_neon)
4298 return arm_neon_double_type (gdbarch);
4299 else
4300 return t;
4301 }
4302
4303 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4304 {
4305 if (!tdep->have_fpa_registers)
4306 return builtin_type (gdbarch)->builtin_void;
4307
4308 return arm_ext_type (gdbarch);
4309 }
4310 else if (regnum == ARM_SP_REGNUM)
4311 return builtin_type (gdbarch)->builtin_data_ptr;
4312 else if (regnum == ARM_PC_REGNUM)
4313 return builtin_type (gdbarch)->builtin_func_ptr;
4314 else if (regnum >= ARRAY_SIZE (arm_register_names))
4315 /* These registers are only supported on targets which supply
4316 an XML description. */
4317 return builtin_type (gdbarch)->builtin_int0;
4318 else
4319 return builtin_type (gdbarch)->builtin_uint32;
4320 }
4321
4322 /* Map a DWARF register REGNUM onto the appropriate GDB register
4323 number. */
4324
4325 static int
4326 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4327 {
4328 /* Core integer regs. */
4329 if (reg >= 0 && reg <= 15)
4330 return reg;
4331
4332 /* Legacy FPA encoding. These were once used in a way which
4333 overlapped with VFP register numbering, so their use is
4334 discouraged, but GDB doesn't support the ARM toolchain
4335 which used them for VFP. */
4336 if (reg >= 16 && reg <= 23)
4337 return ARM_F0_REGNUM + reg - 16;
4338
4339 /* New assignments for the FPA registers. */
4340 if (reg >= 96 && reg <= 103)
4341 return ARM_F0_REGNUM + reg - 96;
4342
4343 /* WMMX register assignments. */
4344 if (reg >= 104 && reg <= 111)
4345 return ARM_WCGR0_REGNUM + reg - 104;
4346
4347 if (reg >= 112 && reg <= 127)
4348 return ARM_WR0_REGNUM + reg - 112;
4349
4350 /* PACBTI register containing the Pointer Authentication Code. */
4351 if (reg == ARM_DWARF_RA_AUTH_CODE)
4352 {
4353 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4354
4355 if (tdep->have_pacbti)
4356 return tdep->pacbti_pseudo_base;
4357
4358 return -1;
4359 }
4360
4361 if (reg >= 192 && reg <= 199)
4362 return ARM_WC0_REGNUM + reg - 192;
4363
4364 /* VFP v2 registers. A double precision value is actually
4365 in d1 rather than s2, but the ABI only defines numbering
4366 for the single precision registers. This will "just work"
4367 in GDB for little endian targets (we'll read eight bytes,
4368 starting in s0 and then progressing to s1), but will be
4369 reversed on big endian targets with VFP. This won't
4370 be a problem for the new Neon quad registers; you're supposed
4371 to use DW_OP_piece for those. */
4372 if (reg >= 64 && reg <= 95)
4373 {
4374 char name_buf[4];
4375
4376 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4377 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4378 strlen (name_buf));
4379 }
4380
4381 /* VFP v3 / Neon registers. This range is also used for VFP v2
4382 registers, except that it now describes d0 instead of s0. */
4383 if (reg >= 256 && reg <= 287)
4384 {
4385 char name_buf[4];
4386
4387 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4388 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4389 strlen (name_buf));
4390 }
4391
4392 return -1;
4393 }
4394
4395 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4396 static int
4397 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4398 {
4399 int reg = regnum;
4400 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4401
4402 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4403 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4404
4405 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4406 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4407
4408 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4409 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4410
4411 if (reg < NUM_GREGS)
4412 return SIM_ARM_R0_REGNUM + reg;
4413 reg -= NUM_GREGS;
4414
4415 if (reg < NUM_FREGS)
4416 return SIM_ARM_FP0_REGNUM + reg;
4417 reg -= NUM_FREGS;
4418
4419 if (reg < NUM_SREGS)
4420 return SIM_ARM_FPS_REGNUM + reg;
4421 reg -= NUM_SREGS;
4422
4423 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4424 }
4425
4426 static const unsigned char op_lit0 = DW_OP_lit0;
4427
4428 static void
4429 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
4430 struct dwarf2_frame_state_reg *reg,
4431 struct frame_info *this_frame)
4432 {
4433 if (is_pacbti_pseudo (gdbarch, regnum))
4434 {
4435 /* Initialize RA_AUTH_CODE to zero. */
4436 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
4437 reg->loc.exp.start = &op_lit0;
4438 reg->loc.exp.len = 1;
4439 return;
4440 }
4441
4442 switch (regnum)
4443 {
4444 case ARM_PC_REGNUM:
4445 case ARM_PS_REGNUM:
4446 reg->how = DWARF2_FRAME_REG_FN;
4447 reg->loc.fn = arm_dwarf2_prev_register;
4448 break;
4449 case ARM_SP_REGNUM:
4450 reg->how = DWARF2_FRAME_REG_CFA;
4451 break;
4452 }
4453 }
4454
4455 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4456 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4457 NULL if an error occurs. BUF is freed. */
4458
4459 static gdb_byte *
4460 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4461 int old_len, int new_len)
4462 {
4463 gdb_byte *new_buf;
4464 int bytes_to_read = new_len - old_len;
4465
4466 new_buf = (gdb_byte *) xmalloc (new_len);
4467 memcpy (new_buf + bytes_to_read, buf, old_len);
4468 xfree (buf);
4469 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4470 {
4471 xfree (new_buf);
4472 return NULL;
4473 }
4474 return new_buf;
4475 }
4476
4477 /* An IT block is at most the 2-byte IT instruction followed by
4478 four 4-byte instructions. The furthest back we must search to
4479 find an IT block that affects the current instruction is thus
4480 2 + 3 * 4 == 14 bytes. */
4481 #define MAX_IT_BLOCK_PREFIX 14
4482
4483 /* Use a quick scan if there are more than this many bytes of
4484 code. */
4485 #define IT_SCAN_THRESHOLD 32
4486
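/* An IT instruction is encoded as 0xbfXY, where X holds the first
   condition and the low nibble Y is a non-zero mask; e.g. "IT EQ" is
   0xbf08. A zero low nibble would instead be a hint such as NOP (0xbf00),
   which is why the scans below require (insn & 0x000f) != 0. */
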
4487 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4488 A breakpoint in an IT block may not be hit, depending on the
4489 condition flags. */
4490 static CORE_ADDR
4491 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4492 {
4493 gdb_byte *buf;
4494 char map_type;
4495 CORE_ADDR boundary, func_start;
4496 int buf_len;
4497 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4498 int i, any, last_it, last_it_count;
4499 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4500
4501 /* If we are using BKPT breakpoints, none of this is necessary. */
4502 if (tdep->thumb2_breakpoint == NULL)
4503 return bpaddr;
4504
4505 /* ARM mode does not have this problem. */
4506 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4507 return bpaddr;
4508
4509 /* We are setting a breakpoint in Thumb code that could potentially
4510 contain an IT block. The first step is to find how much Thumb
4511 code there is; we do not need to read outside of known Thumb
4512 sequences. */
4513 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4514 if (map_type == 0)
4515 /* Thumb-2 code must have mapping symbols to have a chance. */
4516 return bpaddr;
4517
4518 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4519
4520 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4521 && func_start > boundary)
4522 boundary = func_start;
4523
4524 /* Search for a candidate IT instruction. We have to do some fancy
4525 footwork to distinguish a real IT instruction from the second
4526 half of a 32-bit instruction, but there is no need for that if
4527 there's no candidate. */
4528 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4529 if (buf_len == 0)
4530 /* No room for an IT instruction. */
4531 return bpaddr;
4532
4533 buf = (gdb_byte *) xmalloc (buf_len);
4534 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4535 return bpaddr;
4536 any = 0;
4537 for (i = 0; i < buf_len; i += 2)
4538 {
4539 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4540 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4541 {
4542 any = 1;
4543 break;
4544 }
4545 }
4546
4547 if (any == 0)
4548 {
4549 xfree (buf);
4550 return bpaddr;
4551 }
4552
4553 /* OK, the code bytes before this instruction contain at least one
4554 halfword which resembles an IT instruction. We know that it's
4555 Thumb code, but there are still two possibilities. Either the
4556 halfword really is an IT instruction, or it is the second half of
4557 a 32-bit Thumb instruction. The only way we can tell is to
4558 scan forwards from a known instruction boundary. */
4559 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4560 {
4561 int definite;
4562
4563 /* There's a lot of code before this instruction. Start with an
4564 optimistic search; it's easy to recognize halfwords that can
4565 not be the start of a 32-bit instruction, and use that to
4566 lock on to the instruction boundaries. */
4567 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4568 if (buf == NULL)
4569 return bpaddr;
4570 buf_len = IT_SCAN_THRESHOLD;
4571
4572 definite = 0;
4573 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4574 {
4575 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4576 if (thumb_insn_size (inst1) == 2)
4577 {
4578 definite = 1;
4579 break;
4580 }
4581 }
4582
4583 /* At this point, if DEFINITE, BUF[I] is the first place we
4584 are sure that we know the instruction boundaries, and it is far
4585 enough from BPADDR that we could not miss an IT instruction
4586 affecting BPADDR. If ! DEFINITE, give up - start from a
4587 known boundary. */
4588 if (! definite)
4589 {
4590 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4591 bpaddr - boundary);
4592 if (buf == NULL)
4593 return bpaddr;
4594 buf_len = bpaddr - boundary;
4595 i = 0;
4596 }
4597 }
4598 else
4599 {
4600 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4601 if (buf == NULL)
4602 return bpaddr;
4603 buf_len = bpaddr - boundary;
4604 i = 0;
4605 }
4606
4607 /* Scan forwards. Find the last IT instruction before BPADDR. */
4608 last_it = -1;
4609 last_it_count = 0;
4610 while (i < buf_len)
4611 {
4612 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4613 last_it_count--;
4614 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4615 {
4616 last_it = i;
4617 if (inst1 & 0x0001)
4618 last_it_count = 4;
4619 else if (inst1 & 0x0002)
4620 last_it_count = 3;
4621 else if (inst1 & 0x0004)
4622 last_it_count = 2;
4623 else
4624 last_it_count = 1;
4625 }
4626 i += thumb_insn_size (inst1);
4627 }
4628
4629 xfree (buf);
4630
4631 if (last_it == -1)
4632 /* There wasn't really an IT instruction after all. */
4633 return bpaddr;
4634
4635 if (last_it_count < 1)
4636 /* It was too far away. */
4637 return bpaddr;
4638
4639 /* This really is a trouble spot. Move the breakpoint to the IT
4640 instruction. */
4641 return bpaddr - buf_len + last_it;
4642 }
4643
4644 /* ARM displaced stepping support.
4645
4646 Generally ARM displaced stepping works as follows:
4647
4648 1. When an instruction is to be single-stepped, it is first decoded by
4649 arm_process_displaced_insn. Depending on the type of instruction, it is
4650 then copied to a scratch location, possibly in a modified form. The
4651 copy_* set of functions performs such modification, as necessary. A
4652 breakpoint is placed after the modified instruction in the scratch space
4653 to return control to GDB. Note in particular that instructions which
4654 modify the PC will no longer do so after modification.
4655
4656 2. The instruction is single-stepped, by setting the PC to the scratch
4657 location address, and resuming. Control returns to GDB when the
4658 breakpoint is hit.
4659
4660 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4661 function used for the current instruction. This function's job is to
4662 put the CPU/memory state back to what it would have been if the
4663 instruction had been executed unmodified in its original location. */
4664
4665 /* NOP instruction (mov r0, r0). */
4666 #define ARM_NOP 0xe1a00000
4667 #define THUMB_NOP 0x4600
4668
4669 /* Helper for register reads for displaced stepping. In particular, this
4670 returns the PC as it would be seen by the instruction at its original
4671 location. */
4672
4673 ULONGEST
4674 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4675 int regno)
4676 {
4677 ULONGEST ret;
4678 CORE_ADDR from = dsc->insn_addr;
4679
4680 if (regno == ARM_PC_REGNUM)
4681 {
4682 /* Compute pipeline offset:
4683 - When executing an ARM instruction, PC reads as the address of the
4684 current instruction plus 8.
4685 - When executing a Thumb instruction, PC reads as the address of the
4686 current instruction plus 4. */
4687
4688 if (!dsc->is_thumb)
4689 from += 8;
4690 else
4691 from += 4;
4692
4693 displaced_debug_printf ("read pc value %.8lx",
4694 (unsigned long) from);
4695 return (ULONGEST) from;
4696 }
4697 else
4698 {
4699 regcache_cooked_read_unsigned (regs, regno, &ret);
4700
4701 displaced_debug_printf ("read r%d value %.8lx",
4702 regno, (unsigned long) ret);
4703
4704 return ret;
4705 }
4706 }
4707
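/* For example, if an ARM instruction originally at 0x1000 reads the PC
   through this helper, it sees 0x1008 (0x1004 for Thumb), regardless of
   the scratch-pad address the copy actually executes from, because the
   pipeline offset is applied to DSC->insn_addr rather than to the current
   PC. */
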
4708 static int
4709 displaced_in_arm_mode (struct regcache *regs)
4710 {
4711 ULONGEST ps;
4712 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4713
4714 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4715
4716 return (ps & t_bit) == 0;
4717 }
4718
4719 /* Write to the PC as from a branch instruction. */
4720
4721 static void
4722 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4723 ULONGEST val)
4724 {
4725 if (!dsc->is_thumb)
4726 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4727 architecture versions < 6. */
4728 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4729 val & ~(ULONGEST) 0x3);
4730 else
4731 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4732 val & ~(ULONGEST) 0x1);
4733 }
4734
4735 /* Write to the PC as from a branch-exchange instruction. */
4736
4737 static void
4738 bx_write_pc (struct regcache *regs, ULONGEST val)
4739 {
4740 ULONGEST ps;
4741 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4742
4743 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4744
4745 if ((val & 1) == 1)
4746 {
4747 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4748 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4749 }
4750 else if ((val & 2) == 0)
4751 {
4752 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4753 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4754 }
4755 else
4756 {
4757 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4758 mode, align dest to 4 bytes). */
4759 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4760 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4761 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4762 }
4763 }
4764
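/* For example, bx_write_pc with VAL == 0x8001 switches to Thumb mode and
   sets the PC to 0x8000; VAL == 0x8004 selects ARM mode and branches to
   0x8004; VAL == 0x8002 is architecturally unpredictable, so we warn and
   force ARM mode at 0x8000. */
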
4765 /* Write to the PC as if from a load instruction. */
4766
4767 static void
4768 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4769 ULONGEST val)
4770 {
4771 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4772 bx_write_pc (regs, val);
4773 else
4774 branch_write_pc (regs, dsc, val);
4775 }
4776
4777 /* Write to the PC as if from an ALU instruction. */
4778
4779 static void
4780 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4781 ULONGEST val)
4782 {
4783 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4784 bx_write_pc (regs, val);
4785 else
4786 branch_write_pc (regs, dsc, val);
4787 }
4788
4789 /* Helper for writing to registers for displaced stepping. Writing to the PC
4790 has varying effects depending on the instruction which does the write:
4791 this is controlled by the WRITE_PC argument. */
4792
4793 void
4794 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4795 int regno, ULONGEST val, enum pc_write_style write_pc)
4796 {
4797 if (regno == ARM_PC_REGNUM)
4798 {
4799 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
4800
4801 switch (write_pc)
4802 {
4803 case BRANCH_WRITE_PC:
4804 branch_write_pc (regs, dsc, val);
4805 break;
4806
4807 case BX_WRITE_PC:
4808 bx_write_pc (regs, val);
4809 break;
4810
4811 case LOAD_WRITE_PC:
4812 load_write_pc (regs, dsc, val);
4813 break;
4814
4815 case ALU_WRITE_PC:
4816 alu_write_pc (regs, dsc, val);
4817 break;
4818
4819 case CANNOT_WRITE_PC:
4820 warning (_("Instruction wrote to PC in an unexpected way when "
4821 "single-stepping"));
4822 break;
4823
4824 default:
4825 internal_error (__FILE__, __LINE__,
4826 _("Invalid argument to displaced_write_reg"));
4827 }
4828
4829 dsc->wrote_to_pc = 1;
4830 }
4831 else
4832 {
4833 displaced_debug_printf ("writing r%d value %.8lx",
4834 regno, (unsigned long) val);
4835 regcache_cooked_write_unsigned (regs, regno, val);
4836 }
4837 }
4838
4839 /* This function is used to concisely determine if an instruction INSN
4840 references PC. Register fields of interest in INSN should have the
4841 corresponding fields of BITMASK set to 0b1111. The function
4842 returns 1 if any of these fields in INSN reference the PC
4843 (also 0b1111, r15), else it returns 0. */
4844
4845 static int
4846 insn_references_pc (uint32_t insn, uint32_t bitmask)
4847 {
4848 uint32_t lowbit = 1;
4849
4850 while (bitmask != 0)
4851 {
4852 uint32_t mask;
4853
4854 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4855 ;
4856
4857 if (!lowbit)
4858 break;
4859
4860 mask = lowbit * 0xf;
4861
4862 if ((insn & mask) == mask)
4863 return 1;
4864
4865 bitmask &= ~mask;
4866 }
4867
4868 return 0;
4869 }
4870
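/* For example, for an ARM register-offset load the Rn, Rd and Rm fields
   are covered by the bitmask 0x000ff00f, so insn_references_pc returns 1
   for "ldr r0, [pc, r1]" (Rn is r15) and 0 for "ldr r0, [r2, r1]". */
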
4871 /* The simplest copy function. Many instructions have the same effect no
4872 matter what address they are executed at: in those cases, use this. */
4873
4874 static int
4875 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
4876 arm_displaced_step_copy_insn_closure *dsc)
4877 {
4878 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4879 (unsigned long) insn, iname);
4880
4881 dsc->modinsn[0] = insn;
4882
4883 return 0;
4884 }
4885
4886 static int
4887 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4888 uint16_t insn2, const char *iname,
4889 arm_displaced_step_copy_insn_closure *dsc)
4890 {
4891 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4892 "unmodified", insn1, insn2, iname);
4893
4894 dsc->modinsn[0] = insn1;
4895 dsc->modinsn[1] = insn2;
4896 dsc->numinsns = 2;
4897
4898 return 0;
4899 }
4900
4901 /* Copy a 16-bit Thumb instruction (Thumb or 16-bit Thumb-2) without any
4902 modification. */
4903 static int
4904 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4905 const char *iname,
4906 arm_displaced_step_copy_insn_closure *dsc)
4907 {
4908 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4909 insn, iname);
4910
4911 dsc->modinsn[0] = insn;
4912
4913 return 0;
4914 }
4915
4916 /* Preload instructions with immediate offset. */
4917
4918 static void
4919 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
4920 arm_displaced_step_copy_insn_closure *dsc)
4921 {
4922 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4923 if (!dsc->u.preload.immed)
4924 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4925 }
4926
4927 static void
4928 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4929 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
4930 {
4931 ULONGEST rn_val;
4932 /* Preload instructions:
4933
4934 {pli/pld} [rn, #+/-imm]
4935 ->
4936 {pli/pld} [r0, #+/-imm]. */
4937
4938 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4939 rn_val = displaced_read_reg (regs, dsc, rn);
4940 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4941 dsc->u.preload.immed = 1;
4942
4943 dsc->cleanup = &cleanup_preload;
4944 }
4945
4946 static int
4947 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4948 arm_displaced_step_copy_insn_closure *dsc)
4949 {
4950 unsigned int rn = bits (insn, 16, 19);
4951
4952 if (!insn_references_pc (insn, 0x000f0000ul))
4953 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4954
4955 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
4956
4957 dsc->modinsn[0] = insn & 0xfff0ffff;
4958
4959 install_preload (gdbarch, regs, dsc, rn);
4960
4961 return 0;
4962 }
4963
4964 static int
4965 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4966 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
4967 {
4968 unsigned int rn = bits (insn1, 0, 3);
4969 unsigned int u_bit = bit (insn1, 7);
4970 int imm12 = bits (insn2, 0, 11);
4971 ULONGEST pc_val;
4972
4973 if (rn != ARM_PC_REGNUM)
4974 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4975
4976 /* The PC is only allowed as the base register in PLI (immediate, literal)
4977 encoding T3 and PLD (literal) encoding T1. */
4978 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4979 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4980 imm12);
4981
4982 if (!u_bit)
4983 imm12 = -1 * imm12;
4984
4985 /* Rewrite instruction {pli/pld} PC imm12 into:
4986 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4987
4988 {pli/pld} [r0, r1]
4989
4990 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4991
4992 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4993 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4994
4995 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4996
4997 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4998 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4999 dsc->u.preload.immed = 0;
5000
5001 /* {pli/pld} [r0, r1] */
5002 dsc->modinsn[0] = insn1 & 0xfff0;
5003 dsc->modinsn[1] = 0xf001;
5004 dsc->numinsns = 2;
5005
5006 dsc->cleanup = &cleanup_preload;
5007 return 0;
5008 }
5009
5010 /* Preload instructions with register offset. */
5011
5012 static void
5013 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5014 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5015 unsigned int rm)
5016 {
5017 ULONGEST rn_val, rm_val;
5018
5019 /* Preload register-offset instructions:
5020
5021 {pli/pld} [rn, rm {, shift}]
5022 ->
5023 {pli/pld} [r0, r1 {, shift}]. */
5024
5025 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5026 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5027 rn_val = displaced_read_reg (regs, dsc, rn);
5028 rm_val = displaced_read_reg (regs, dsc, rm);
5029 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5030 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5031 dsc->u.preload.immed = 0;
5032
5033 dsc->cleanup = &cleanup_preload;
5034 }
5035
5036 static int
5037 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5038 struct regcache *regs,
5039 arm_displaced_step_copy_insn_closure *dsc)
5040 {
5041 unsigned int rn = bits (insn, 16, 19);
5042 unsigned int rm = bits (insn, 0, 3);
5043
5044
5045 if (!insn_references_pc (insn, 0x000f000ful))
5046 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5047
5048 displaced_debug_printf ("copying preload insn %.8lx",
5049 (unsigned long) insn);
5050
5051 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5052
5053 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5054 return 0;
5055 }
5056
5057 /* Copy/cleanup coprocessor load and store instructions. */
5058
5059 static void
5060 cleanup_copro_load_store (struct gdbarch *gdbarch,
5061 struct regcache *regs,
5062 arm_displaced_step_copy_insn_closure *dsc)
5063 {
5064 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5065
5066 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5067
5068 if (dsc->u.ldst.writeback)
5069 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5070 }
5071
5072 static void
5073 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5074 arm_displaced_step_copy_insn_closure *dsc,
5075 int writeback, unsigned int rn)
5076 {
5077 ULONGEST rn_val;
5078
5079 /* Coprocessor load/store instructions:
5080
5081 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5082 ->
5083 {stc/stc2} [r0, #+/-imm].
5084
5085 ldc/ldc2 are handled identically. */
5086
5087 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5088 rn_val = displaced_read_reg (regs, dsc, rn);
5089 /* PC should be 4-byte aligned. */
5090 rn_val = rn_val & 0xfffffffc;
5091 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5092
5093 dsc->u.ldst.writeback = writeback;
5094 dsc->u.ldst.rn = rn;
5095
5096 dsc->cleanup = &cleanup_copro_load_store;
5097 }
5098
5099 static int
5100 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5101 struct regcache *regs,
5102 arm_displaced_step_copy_insn_closure *dsc)
5103 {
5104 unsigned int rn = bits (insn, 16, 19);
5105
5106 if (!insn_references_pc (insn, 0x000f0000ul))
5107 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5108
5109 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5110 (unsigned long) insn);
5111
5112 dsc->modinsn[0] = insn & 0xfff0ffff;
5113
5114 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5115
5116 return 0;
5117 }
5118
5119 static int
5120 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5121 uint16_t insn2, struct regcache *regs,
5122 arm_displaced_step_copy_insn_closure *dsc)
5123 {
5124 unsigned int rn = bits (insn1, 0, 3);
5125
5126 if (rn != ARM_PC_REGNUM)
5127 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5128 "copro load/store", dsc);
5129
5130 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5131 insn1, insn2);
5132
5133 dsc->modinsn[0] = insn1 & 0xfff0;
5134 dsc->modinsn[1] = insn2;
5135 dsc->numinsns = 2;
5136
5137 /* This function is called to copy LDC/LDC2/VLDR instructions, which
5138 don't support writeback, so pass 0. */
5139 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5140
5141 return 0;
5142 }
5143
5144 /* Clean up branch instructions (actually perform the branch, by setting
5145 PC). */
5146
5147 static void
5148 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5149 arm_displaced_step_copy_insn_closure *dsc)
5150 {
5151 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5152 int branch_taken = condition_true (dsc->u.branch.cond, status);
5153 enum pc_write_style write_pc = dsc->u.branch.exchange
5154 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5155
5156 if (!branch_taken)
5157 return;
5158
5159 if (dsc->u.branch.link)
5160 {
5161 /* LR should hold the address of the instruction following the current
5162 one. So as not to confuse the logic that later handles a `bx lr', bit 0
5163 of the LR value is set to 1 when the current instruction is Thumb. */
5164 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5165
5166 if (dsc->is_thumb)
5167 next_insn_addr |= 0x1;
5168
5169 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5170 CANNOT_WRITE_PC);
5171 }
5172
5173 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5174 }
5175
5176 /* Copy B/BL/BLX instructions with immediate destinations. */
5177
5178 static void
5179 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5180 arm_displaced_step_copy_insn_closure *dsc,
5181 unsigned int cond, int exchange, int link, long offset)
5182 {
5183 /* Implement "BL<cond> <label>" as:
5184
5185 Preparation: cond <- instruction condition
5186 Insn: mov r0, r0 (nop)
5187 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5188
5189 B<cond> similar, but don't set r14 in cleanup. */
5190
5191 dsc->u.branch.cond = cond;
5192 dsc->u.branch.link = link;
5193 dsc->u.branch.exchange = exchange;
5194
5195 dsc->u.branch.dest = dsc->insn_addr;
5196 if (link && exchange)
5197 /* For BLX, the offset is computed from Align (PC, 4). */
5198 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5199
5200 if (dsc->is_thumb)
5201 dsc->u.branch.dest += 4 + offset;
5202 else
5203 dsc->u.branch.dest += 8 + offset;
5204
5205 dsc->cleanup = &cleanup_branch;
5206 }
5207 static int
5208 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5209 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5210 {
5211 unsigned int cond = bits (insn, 28, 31);
5212 int exchange = (cond == 0xf);
5213 int link = exchange || bit (insn, 24);
5214 long offset;
5215
5216 displaced_debug_printf ("copying %s immediate insn %.8lx",
5217 (exchange) ? "blx" : (link) ? "bl" : "b",
5218 (unsigned long) insn);
5219 if (exchange)
5220 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5221 then arrange the switch into Thumb mode. */
5222 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5223 else
5224 offset = bits (insn, 0, 23) << 2;
5225
5226 if (bit (offset, 25))
5227 offset = offset | ~0x3ffffff;
5228
5229 dsc->modinsn[0] = ARM_NOP;
5230
5231 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5232 return 0;
5233 }
5234
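/* For example, an ARM "bl" at 0x8000 whose 24-bit immediate field is 0x10
   gives OFFSET 0x40 above (the field shifted left by two and
   sign-extended), so cleanup_branch branches to 0x8000 + 8 + 0x40 == 0x8048
   and sets LR to the following instruction, 0x8004. */
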
5235 static int
5236 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5237 uint16_t insn2, struct regcache *regs,
5238 arm_displaced_step_copy_insn_closure *dsc)
5239 {
5240 int link = bit (insn2, 14);
5241 int exchange = link && !bit (insn2, 12);
5242 int cond = INST_AL;
5243 long offset = 0;
5244 int j1 = bit (insn2, 13);
5245 int j2 = bit (insn2, 11);
5246 int s = sbits (insn1, 10, 10);
5247 int i1 = !(j1 ^ bit (insn1, 10));
5248 int i2 = !(j2 ^ bit (insn1, 10));
5249
5250 if (!link && !exchange) /* B */
5251 {
5252 offset = (bits (insn2, 0, 10) << 1);
5253 if (bit (insn2, 12)) /* Encoding T4 */
5254 {
5255 offset |= (bits (insn1, 0, 9) << 12)
5256 | (i2 << 22)
5257 | (i1 << 23)
5258 | (s << 24);
5259 cond = INST_AL;
5260 }
5261 else /* Encoding T3 */
5262 {
5263 offset |= (bits (insn1, 0, 5) << 12)
5264 | (j1 << 18)
5265 | (j2 << 19)
5266 | (s << 20);
5267 cond = bits (insn1, 6, 9);
5268 }
5269 }
5270 else
5271 {
5272 offset = (bits (insn1, 0, 9) << 12);
5273 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5274 offset |= exchange ?
5275 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5276 }
5277
5278 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5279 link ? (exchange) ? "blx" : "bl" : "b",
5280 insn1, insn2, offset);
5281
5282 dsc->modinsn[0] = THUMB_NOP;
5283
5284 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5285 return 0;
5286 }
5287
5288 /* Copy B Thumb instructions. */
5289 static int
5290 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5291 arm_displaced_step_copy_insn_closure *dsc)
5292 {
5293 unsigned int cond = 0;
5294 int offset = 0;
5295 unsigned short bit_12_15 = bits (insn, 12, 15);
5296 CORE_ADDR from = dsc->insn_addr;
5297
5298 if (bit_12_15 == 0xd)
5299 {
5300 /* offset = SignExtend (imm8:0, 32) */
5301 offset = sbits ((insn << 1), 0, 8);
5302 cond = bits (insn, 8, 11);
5303 }
5304 else if (bit_12_15 == 0xe) /* Encoding T2 */
5305 {
5306 offset = sbits ((insn << 1), 0, 11);
5307 cond = INST_AL;
5308 }
5309
5310 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5311 insn, offset);
5312
5313 dsc->u.branch.cond = cond;
5314 dsc->u.branch.link = 0;
5315 dsc->u.branch.exchange = 0;
5316 dsc->u.branch.dest = from + 4 + offset;
5317
5318 dsc->modinsn[0] = THUMB_NOP;
5319
5320 dsc->cleanup = &cleanup_branch;
5321
5322 return 0;
5323 }
5324
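/* For example, the Thumb conditional branch 0xd003 ("beq" with an imm8 of
   3) yields cond == EQ and offset == 6, so the cleanup branches to
   FROM + 4 + 6 if the Z flag is set; the unconditional encoding T2
   (bits 12-15 == 0xe) is handled the same way with cond forced to AL. */
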
5325 /* Copy BX/BLX with register-specified destinations. */
5326
5327 static void
5328 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5329 arm_displaced_step_copy_insn_closure *dsc, int link,
5330 unsigned int cond, unsigned int rm)
5331 {
5332 /* Implement "{BX,BLX}<cond> <reg>" as:
5333
5334 Preparation: cond <- instruction condition
5335 Insn: mov r0, r0 (nop)
5336 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5337
5338 Don't set r14 in cleanup for BX. */
5339
5340 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5341
5342 dsc->u.branch.cond = cond;
5343 dsc->u.branch.link = link;
5344
5345 dsc->u.branch.exchange = 1;
5346
5347 dsc->cleanup = &cleanup_branch;
5348 }
5349
5350 static int
5351 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5352 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5353 {
5354 unsigned int cond = bits (insn, 28, 31);
5355 /* BX: x12xxx1x
5356 BLX: x12xxx3x. */
5357 int link = bit (insn, 5);
5358 unsigned int rm = bits (insn, 0, 3);
5359
5360 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5361
5362 dsc->modinsn[0] = ARM_NOP;
5363
5364 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5365 return 0;
5366 }
5367
5368 static int
5369 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5370 struct regcache *regs,
5371 arm_displaced_step_copy_insn_closure *dsc)
5372 {
5373 int link = bit (insn, 7);
5374 unsigned int rm = bits (insn, 3, 6);
5375
5376 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5377
5378 dsc->modinsn[0] = THUMB_NOP;
5379
5380 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5381
5382 return 0;
5383 }
5384
5385
5386 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5387
5388 static void
5389 cleanup_alu_imm (struct gdbarch *gdbarch,
5390 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5391 {
5392 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5393 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5394 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5395 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5396 }
5397
5398 static int
5399 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5400 arm_displaced_step_copy_insn_closure *dsc)
5401 {
5402 unsigned int rn = bits (insn, 16, 19);
5403 unsigned int rd = bits (insn, 12, 15);
5404 unsigned int op = bits (insn, 21, 24);
5405 int is_mov = (op == 0xd);
5406 ULONGEST rd_val, rn_val;
5407
5408 if (!insn_references_pc (insn, 0x000ff000ul))
5409 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5410
5411 displaced_debug_printf ("copying immediate %s insn %.8lx",
5412 is_mov ? "move" : "ALU",
5413 (unsigned long) insn);
5414
5415 /* Instruction is of form:
5416
5417 <op><cond> rd, [rn,] #imm
5418
5419 Rewrite as:
5420
5421 Preparation: tmp1, tmp2 <- r0, r1;
5422 r0, r1 <- rd, rn
5423 Insn: <op><cond> r0, r1, #imm
5424 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5425 */
5426
5427 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5428 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5429 rn_val = displaced_read_reg (regs, dsc, rn);
5430 rd_val = displaced_read_reg (regs, dsc, rd);
5431 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5432 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5433 dsc->rd = rd;
5434
5435 if (is_mov)
5436 dsc->modinsn[0] = insn & 0xfff00fff;
5437 else
5438 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5439
5440 dsc->cleanup = &cleanup_alu_imm;
5441
5442 return 0;
5443 }
5444
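/* For example, "add r2, pc, #4" at 0x1000 is rewritten as "add r0, r1, #4"
   with r1 preloaded with 0x1008 (the original PC value); the cleanup copies
   the result from r0 into r2 and restores the saved r0/r1, so r2 ends up
   holding 0x100c just as it would have at the original location. */
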
5445 static int
5446 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5447 uint16_t insn2, struct regcache *regs,
5448 arm_displaced_step_copy_insn_closure *dsc)
5449 {
5450 unsigned int op = bits (insn1, 5, 8);
5451 unsigned int rn, rm, rd;
5452 ULONGEST rd_val, rn_val;
5453
5454 rn = bits (insn1, 0, 3); /* Rn */
5455 rm = bits (insn2, 0, 3); /* Rm */
5456 rd = bits (insn2, 8, 11); /* Rd */
5457
5458 /* This routine is only called for the MOV instruction. */
5459 gdb_assert (op == 0x2 && rn == 0xf);
5460
5461 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5462 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5463
5464 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5465
5466 /* Instruction is of form:
5467
5468 <op><cond> rd, [rn,] #imm
5469
5470 Rewrite as:
5471
5472 Preparation: tmp1, tmp2 <- r0, r1;
5473 r0, r1 <- rd, rn
5474 Insn: <op><cond> r0, r1, #imm
5475 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5476 */
5477
5478 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5479 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5480 rn_val = displaced_read_reg (regs, dsc, rn);
5481 rd_val = displaced_read_reg (regs, dsc, rd);
5482 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5483 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5484 dsc->rd = rd;
5485
5486 dsc->modinsn[0] = insn1;
5487 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5488 dsc->numinsns = 2;
5489
5490 dsc->cleanup = &cleanup_alu_imm;
5491
5492 return 0;
5493 }
5494
5495 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5496
5497 static void
5498 cleanup_alu_reg (struct gdbarch *gdbarch,
5499 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5500 {
5501 ULONGEST rd_val;
5502 int i;
5503
5504 rd_val = displaced_read_reg (regs, dsc, 0);
5505
5506 for (i = 0; i < 3; i++)
5507 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5508
5509 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5510 }
5511
5512 static void
5513 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5514 arm_displaced_step_copy_insn_closure *dsc,
5515 unsigned int rd, unsigned int rn, unsigned int rm)
5516 {
5517 ULONGEST rd_val, rn_val, rm_val;
5518
5519 /* Instruction is of form:
5520
5521 <op><cond> rd, [rn,] rm [, <shift>]
5522
5523 Rewrite as:
5524
5525 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5526 r0, r1, r2 <- rd, rn, rm
5527 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5528 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5529 */
5530
5531 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5532 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5533 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5534 rd_val = displaced_read_reg (regs, dsc, rd);
5535 rn_val = displaced_read_reg (regs, dsc, rn);
5536 rm_val = displaced_read_reg (regs, dsc, rm);
5537 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5538 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5539 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5540 dsc->rd = rd;
5541
5542 dsc->cleanup = &cleanup_alu_reg;
5543 }
5544
5545 static int
5546 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5547 arm_displaced_step_copy_insn_closure *dsc)
5548 {
5549 unsigned int op = bits (insn, 21, 24);
5550 int is_mov = (op == 0xd);
5551
5552 if (!insn_references_pc (insn, 0x000ff00ful))
5553 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5554
5555 displaced_debug_printf ("copying reg %s insn %.8lx",
5556 is_mov ? "move" : "ALU", (unsigned long) insn);
5557
5558 if (is_mov)
5559 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5560 else
5561 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5562
5563 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5564 bits (insn, 0, 3));
5565 return 0;
5566 }
5567
5568 static int
5569 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5570 struct regcache *regs,
5571 arm_displaced_step_copy_insn_closure *dsc)
5572 {
5573 unsigned rm, rd;
5574
5575 rm = bits (insn, 3, 6);
5576 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5577
5578 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5579 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5580
5581 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5582
5583 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5584
5585 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5586
5587 return 0;
5588 }
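/* Illustrative example (added for exposition): for the 16-bit instruction

     mov pc, r3        ; encoding 0x469f

   rd is 15 and rm is 3, so the masked encoding becomes 0x4610, i.e.
   "mov r0, r2".  install_alu_reg seeds r0/r1 with the PC value the original
   instruction would have seen and r2 with the value of r3; cleanup_alu_reg
   then writes r0 back to the PC using ALU_WRITE_PC, so execution resumes at
   the address that was held in r3.  */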
5589
5590 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5591
5592 static void
5593 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5594 struct regcache *regs,
5595 arm_displaced_step_copy_insn_closure *dsc)
5596 {
5597 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5598 int i;
5599
5600 for (i = 0; i < 4; i++)
5601 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5602
5603 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5604 }
5605
5606 static void
5607 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5608 arm_displaced_step_copy_insn_closure *dsc,
5609 unsigned int rd, unsigned int rn, unsigned int rm,
5610 unsigned rs)
5611 {
5612 int i;
5613 ULONGEST rd_val, rn_val, rm_val, rs_val;
5614
5615 /* Instruction is of form:
5616
5617 <op><cond> rd, [rn,] rm, <shift> rs
5618
5619 Rewrite as:
5620
5621 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5622 r0, r1, r2, r3 <- rd, rn, rm, rs
5623 Insn: <op><cond> r0, r1, r2, <shift> r3
5624 Cleanup: tmp5 <- r0
5625 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5626 rd <- tmp5
5627 */
5628
5629 for (i = 0; i < 4; i++)
5630 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5631
5632 rd_val = displaced_read_reg (regs, dsc, rd);
5633 rn_val = displaced_read_reg (regs, dsc, rn);
5634 rm_val = displaced_read_reg (regs, dsc, rm);
5635 rs_val = displaced_read_reg (regs, dsc, rs);
5636 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5637 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5638 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5639 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5640 dsc->rd = rd;
5641 dsc->cleanup = &cleanup_alu_shifted_reg;
5642 }
5643
5644 static int
5645 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5646 struct regcache *regs,
5647 arm_displaced_step_copy_insn_closure *dsc)
5648 {
5649 unsigned int op = bits (insn, 21, 24);
5650 int is_mov = (op == 0xd);
5651 unsigned int rd, rn, rm, rs;
5652
5653 if (!insn_references_pc (insn, 0x000fff0ful))
5654 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5655
5656 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5657 is_mov ? "move" : "ALU",
5658 (unsigned long) insn);
5659
5660 rn = bits (insn, 16, 19);
5661 rm = bits (insn, 0, 3);
5662 rs = bits (insn, 8, 11);
5663 rd = bits (insn, 12, 15);
5664
5665 if (is_mov)
5666 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5667 else
5668 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5669
5670 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5671
5672 return 0;
5673 }
5674
5675 /* Clean up load instructions. */
5676
5677 static void
5678 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5679 arm_displaced_step_copy_insn_closure *dsc)
5680 {
5681 ULONGEST rt_val, rt_val2 = 0, rn_val;
5682
5683 rt_val = displaced_read_reg (regs, dsc, 0);
5684 if (dsc->u.ldst.xfersize == 8)
5685 rt_val2 = displaced_read_reg (regs, dsc, 1);
5686 rn_val = displaced_read_reg (regs, dsc, 2);
5687
5688 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5689 if (dsc->u.ldst.xfersize > 4)
5690 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5691 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5692 if (!dsc->u.ldst.immed)
5693 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5694
5695 /* Handle register writeback. */
5696 if (dsc->u.ldst.writeback)
5697 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5698 /* Put result in right place. */
5699 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5700 if (dsc->u.ldst.xfersize == 8)
5701 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5702 }
5703
5704 /* Clean up store instructions. */
5705
5706 static void
5707 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5708 arm_displaced_step_copy_insn_closure *dsc)
5709 {
5710 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5711
5712 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5713 if (dsc->u.ldst.xfersize > 4)
5714 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5715 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5716 if (!dsc->u.ldst.immed)
5717 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5718 if (!dsc->u.ldst.restore_r4)
5719 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5720
5721 /* Writeback. */
5722 if (dsc->u.ldst.writeback)
5723 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5724 }
5725
5726 /* Copy "extra" load/store instructions. These are halfword/doubleword
5727 transfers, which have a different encoding to byte/word transfers. */
5728
5729 static int
5730 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5731 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5732 {
5733 unsigned int op1 = bits (insn, 20, 24);
5734 unsigned int op2 = bits (insn, 5, 6);
5735 unsigned int rt = bits (insn, 12, 15);
5736 unsigned int rn = bits (insn, 16, 19);
5737 unsigned int rm = bits (insn, 0, 3);
5738 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5739 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5740 int immed = (op1 & 0x4) != 0;
5741 int opcode;
5742 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5743
5744 if (!insn_references_pc (insn, 0x000ff00ful))
5745 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5746
5747 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5748 unprivileged ? "unprivileged " : "",
5749 (unsigned long) insn);
5750
5751 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5752
5753 if (opcode < 0)
5754 internal_error (__FILE__, __LINE__,
5755 _("copy_extra_ld_st: instruction decode error"));
5756
5757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5758 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5759 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5760 if (!immed)
5761 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5762
5763 rt_val = displaced_read_reg (regs, dsc, rt);
5764 if (bytesize[opcode] == 8)
5765 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5766 rn_val = displaced_read_reg (regs, dsc, rn);
5767 if (!immed)
5768 rm_val = displaced_read_reg (regs, dsc, rm);
5769
5770 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5771 if (bytesize[opcode] == 8)
5772 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5773 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5774 if (!immed)
5775 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5776
5777 dsc->rd = rt;
5778 dsc->u.ldst.xfersize = bytesize[opcode];
5779 dsc->u.ldst.rn = rn;
5780 dsc->u.ldst.immed = immed;
5781 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5782 dsc->u.ldst.restore_r4 = 0;
5783
5784 if (immed)
5785 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5786 ->
5787 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5788 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5789 else
5790 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5791 ->
5792 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5793 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5794
5795 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5796
5797 return 0;
5798 }
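/* Worked example of the opcode computation above (added for clarity): for
   "ldrh r1, [pc, #8]" op2 is 0b01, the load bit (op1 & 0x1) is 1 and the
   immediate bit ((op1 & 0x4) >> 1) contributes 2, so
   opcode = ((1 << 2) | 1 | 2) - 4 = 3; load[3] is 1 and bytesize[3] is 2,
   selecting cleanup_load with a two-byte transfer.  */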
5799
5800 /* Copy byte/half word/word loads and stores. */
5801
5802 static void
5803 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5804 arm_displaced_step_copy_insn_closure *dsc, int load,
5805 int immed, int writeback, int size, int usermode,
5806 int rt, int rm, int rn)
5807 {
5808 ULONGEST rt_val, rn_val, rm_val = 0;
5809
5810 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5811 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5812 if (!immed)
5813 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5814 if (!load)
5815 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5816
5817 rt_val = displaced_read_reg (regs, dsc, rt);
5818 rn_val = displaced_read_reg (regs, dsc, rn);
5819 if (!immed)
5820 rm_val = displaced_read_reg (regs, dsc, rm);
5821
5822 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5823 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5824 if (!immed)
5825 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5826 dsc->rd = rt;
5827 dsc->u.ldst.xfersize = size;
5828 dsc->u.ldst.rn = rn;
5829 dsc->u.ldst.immed = immed;
5830 dsc->u.ldst.writeback = writeback;
5831
5832 /* To write PC we can do:
5833
5834 Before this sequence of instructions:
5835 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5836 r2 is the Rn value got from displaced_read_reg.
5837
5838 Insn1: push {pc} Write address of STR instruction + offset on stack
5839 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5840 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5841 = addr(Insn1) + offset - addr(Insn3) - 8
5842 = offset - 16
5843 Insn4: add r4, r4, #8 r4 = offset - 8
5844 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5845 = from + offset
5846 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5847
5848 Otherwise we don't know what value to write for PC, since the offset is
5849 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5850 of this can be found in Section "Saving from r15" in
5851 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5852
5853 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5854 }
5855
5856
5857 static int
5858 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5859 uint16_t insn2, struct regcache *regs,
5860 arm_displaced_step_copy_insn_closure *dsc, int size)
5861 {
5862 unsigned int u_bit = bit (insn1, 7);
5863 unsigned int rt = bits (insn2, 12, 15);
5864 int imm12 = bits (insn2, 0, 11);
5865 ULONGEST pc_val;
5866
5867 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5868 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5869 imm12);
5870
5871 if (!u_bit)
5872 imm12 = -1 * imm12;
5873
5874 /* Rewrite instruction LDR Rt, [PC, #imm12] into:
5875
5876 Preparation: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12;
5877
5878 Insn: LDR R0, [R2, R3];
5879
5880 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5881
5882
5883 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5884 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5885 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5886
5887 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5888
5889 pc_val = pc_val & 0xfffffffc;
5890
5891 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5892 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5893
5894 dsc->rd = rt;
5895
5896 dsc->u.ldst.xfersize = size;
5897 dsc->u.ldst.immed = 0;
5898 dsc->u.ldst.writeback = 0;
5899 dsc->u.ldst.restore_r4 = 0;
5900
5901 /* LDR R0, [R2, R3]. */
5902 dsc->modinsn[0] = 0xf852;
5903 dsc->modinsn[1] = 0x3;
5904 dsc->numinsns = 2;
5905
5906 dsc->cleanup = &cleanup_load;
5907
5908 return 0;
5909 }
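/* Illustrative example (added for exposition): for the 32-bit literal load

     ldr.w r5, [pc, #8]    ; insn1 = 0xf8df, insn2 = 0x5008

   u_bit is 1, rt is 5 and imm12 is 8.  r2 is seeded with the Align(PC, 4)
   value of the original location and r3 with 8; the copied pair
   0xf852/0x0003 is "ldr.w r0, [r2, r3]", and cleanup_load then moves the
   loaded word from r0 into r5 and restores the scratch registers.  */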
5910
5911 static int
5912 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5913 uint16_t insn2, struct regcache *regs,
5914 arm_displaced_step_copy_insn_closure *dsc,
5915 int writeback, int immed)
5916 {
5917 unsigned int rt = bits (insn2, 12, 15);
5918 unsigned int rn = bits (insn1, 0, 3);
5919 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5920 /* In LDR (register), there is also a register Rm, which is not allowed to
5921 be PC, so we don't have to check it. */
5922
5923 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5924 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5925 dsc);
5926
5927 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5928 rt, rn, insn1, insn2);
5929
5930 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5931 0, rt, rm, rn);
5932
5933 dsc->u.ldst.restore_r4 = 0;
5934
5935 if (immed)
5936 /* ldr[b]<cond> rt, [rn, #imm], etc.
5937 ->
5938 ldr[b]<cond> r0, [r2, #imm]. */
5939 {
5940 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5941 dsc->modinsn[1] = insn2 & 0x0fff;
5942 }
5943 else
5944 /* ldr[b]<cond> rt, [rn, rm], etc.
5945 ->
5946 ldr[b]<cond> r0, [r2, r3]. */
5947 {
5948 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5949 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5950 }
5951
5952 dsc->numinsns = 2;
5953
5954 return 0;
5955 }
5956
5957
5958 static int
5959 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5960 struct regcache *regs,
5961 arm_displaced_step_copy_insn_closure *dsc,
5962 int load, int size, int usermode)
5963 {
5964 int immed = !bit (insn, 25);
5965 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5966 unsigned int rt = bits (insn, 12, 15);
5967 unsigned int rn = bits (insn, 16, 19);
5968 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5969
5970 if (!insn_references_pc (insn, 0x000ff00ful))
5971 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5972
5973 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5974 load ? (size == 1 ? "ldrb" : "ldr")
5975 : (size == 1 ? "strb" : "str"),
5976 usermode ? "t" : "",
5977 rt, rn,
5978 (unsigned long) insn);
5979
5980 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5981 usermode, rt, rm, rn);
5982
5983 if (load || rt != ARM_PC_REGNUM)
5984 {
5985 dsc->u.ldst.restore_r4 = 0;
5986
5987 if (immed)
5988 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5989 ->
5990 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5991 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5992 else
5993 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5994 ->
5995 {ldr,str}[b]<cond> r0, [r2, r3]. */
5996 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5997 }
5998 else
5999 {
6000 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6001 dsc->u.ldst.restore_r4 = 1;
6002 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6003 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6004 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6005 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6006 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6007
6008 /* As above. */
6009 if (immed)
6010 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6011 else
6012 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6013
6014 dsc->numinsns = 6;
6015 }
6016
6017 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6018
6019 return 0;
6020 }
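/* Illustrative note (added for exposition): if the original instruction at
   address FROM is, say, "str pc, [r2]", the architecture permits either
   FROM + 8 or FROM + 12 to be stored.  The six-instruction sequence above
   measures that implementation-defined offset at the displaced location
   (r4 ends up holding offset - 8) and adds it to r0, which was seeded with
   FROM + 8, so the value finally stored is FROM plus the same offset the
   original instruction would have used.  */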
6021
6022 /* Cleanup LDM instructions with fully-populated register list. This is an
6023 unfortunate corner case: it's impossible to implement correctly by modifying
6024 the instruction. The issue is as follows: we have an instruction,
6025
6026 ldm rN, {r0-r15}
6027
6028 which we must rewrite to avoid loading PC. A possible solution would be to
6029 do the load in two halves, something like (with suitable cleanup
6030 afterwards):
6031
6032 mov r8, rN
6033 ldm[id][ab] r8!, {r0-r7}
6034 str r7, <temp>
6035 ldm[id][ab] r8, {r7-r14}
6036 <bkpt>
6037
6038 but at present there's no suitable place for <temp>, since the scratch space
6039 is overwritten before the cleanup routine is called. For now, we simply
6040 emulate the instruction. */
6041
6042 static void
6043 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6044 arm_displaced_step_copy_insn_closure *dsc)
6045 {
6046 int inc = dsc->u.block.increment;
6047 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6048 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6049 uint32_t regmask = dsc->u.block.regmask;
6050 int regno = inc ? 0 : 15;
6051 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6052 int exception_return = dsc->u.block.load && dsc->u.block.user
6053 && (regmask & 0x8000) != 0;
6054 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6055 int do_transfer = condition_true (dsc->u.block.cond, status);
6056 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6057
6058 if (!do_transfer)
6059 return;
6060
6061 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6062 sensible we can do here. Complain loudly. */
6063 if (exception_return)
6064 error (_("Cannot single-step exception return"));
6065
6066 /* We don't handle any stores here for now. */
6067 gdb_assert (dsc->u.block.load != 0);
6068
6069 displaced_debug_printf ("emulating block transfer: %s %s %s",
6070 dsc->u.block.load ? "ldm" : "stm",
6071 dsc->u.block.increment ? "inc" : "dec",
6072 dsc->u.block.before ? "before" : "after");
6073
6074 while (regmask)
6075 {
6076 uint32_t memword;
6077
6078 if (inc)
6079 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6080 regno++;
6081 else
6082 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6083 regno--;
6084
6085 xfer_addr += bump_before;
6086
6087 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6088 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6089
6090 xfer_addr += bump_after;
6091
6092 regmask &= ~(1 << regno);
6093 }
6094
6095 if (dsc->u.block.writeback)
6096 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6097 CANNOT_WRITE_PC);
6098 }
6099
6100 /* Clean up an STM which included the PC in the register list. */
6101
6102 static void
6103 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6104 arm_displaced_step_copy_insn_closure *dsc)
6105 {
6106 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6107 int store_executed = condition_true (dsc->u.block.cond, status);
6108 CORE_ADDR pc_stored_at, transferred_regs
6109 = count_one_bits (dsc->u.block.regmask);
6110 CORE_ADDR stm_insn_addr;
6111 uint32_t pc_val;
6112 long offset;
6113 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6114
6115 /* If condition code fails, there's nothing else to do. */
6116 if (!store_executed)
6117 return;
6118
6119 if (dsc->u.block.increment)
6120 {
6121 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6122
6123 if (dsc->u.block.before)
6124 pc_stored_at += 4;
6125 }
6126 else
6127 {
6128 pc_stored_at = dsc->u.block.xfer_addr;
6129
6130 if (dsc->u.block.before)
6131 pc_stored_at -= 4;
6132 }
6133
6134 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6135 stm_insn_addr = dsc->scratch_base;
6136 offset = pc_val - stm_insn_addr;
6137
6138 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6139 offset);
6140
6141 /* Rewrite the stored PC to the proper value for the non-displaced original
6142 instruction. */
6143 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6144 dsc->insn_addr + offset);
6145 }
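/* Worked example (added for clarity): suppose the scratch copy of
   "stmia r0, {r1, pc}" sits at scratch_base 0x1000 and the word read back
   from pc_stored_at is 0x1008.  The detected offset is 8, so the stored word
   is rewritten to insn_addr + 8, which is what the non-displaced instruction
   would have stored on this particular implementation.  */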
6146
6147 /* Clean up an LDM which includes the PC in the register list. We clumped all
6148 the registers in the transferred list into a contiguous range r0...rX (to
6149 avoid loading PC directly and losing control of the debugged program), so we
6150 must undo that here. */
6151
6152 static void
6153 cleanup_block_load_pc (struct gdbarch *gdbarch,
6154 struct regcache *regs,
6155 arm_displaced_step_copy_insn_closure *dsc)
6156 {
6157 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6158 int load_executed = condition_true (dsc->u.block.cond, status);
6159 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6160 unsigned int regs_loaded = count_one_bits (mask);
6161 unsigned int num_to_shuffle = regs_loaded, clobbered;
6162
6163 /* The method employed here will fail if the register list is fully populated
6164 (we need to avoid loading PC directly). */
6165 gdb_assert (num_to_shuffle < 16);
6166
6167 if (!load_executed)
6168 return;
6169
6170 clobbered = (1 << num_to_shuffle) - 1;
6171
6172 while (num_to_shuffle > 0)
6173 {
6174 if ((mask & (1 << write_reg)) != 0)
6175 {
6176 unsigned int read_reg = num_to_shuffle - 1;
6177
6178 if (read_reg != write_reg)
6179 {
6180 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6181 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6182 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6183 read_reg, write_reg);
6184 }
6185 else
6186 displaced_debug_printf ("LDM: register r%d already in the right "
6187 "place", write_reg);
6188
6189 clobbered &= ~(1 << write_reg);
6190
6191 num_to_shuffle--;
6192 }
6193
6194 write_reg--;
6195 }
6196
6197 /* Restore any registers we scribbled over. */
6198 for (write_reg = 0; clobbered != 0; write_reg++)
6199 {
6200 if ((clobbered & (1 << write_reg)) != 0)
6201 {
6202 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6203 CANNOT_WRITE_PC);
6204 displaced_debug_printf ("LDM: restored clobbered register r%d",
6205 write_reg);
6206 clobbered &= ~(1 << write_reg);
6207 }
6208 }
6209
6210 /* Perform register writeback manually. */
6211 if (dsc->u.block.writeback)
6212 {
6213 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6214
6215 if (dsc->u.block.increment)
6216 new_rn_val += regs_loaded * 4;
6217 else
6218 new_rn_val -= regs_loaded * 4;
6219
6220 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6221 CANNOT_WRITE_PC);
6222 }
6223 }
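/* Worked example (added for exposition): for an original
   "ldm r0, {r4, r9, pc}" the copy routine below transfers into the
   contiguous list {r0, r1, r2}.  LDM loads the lowest-numbered register from
   the lowest address, so r0 receives the word meant for r4, r1 the word for
   r9 and r2 the word for the PC.  The loop above walks write_reg down from
   15 and pairs it with the highest remaining read_reg: r2 -> pc, r1 -> r9,
   r0 -> r4; r0-r2 then remain marked in "clobbered" and are restored from
   dsc->tmp.  */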
6224
6225 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6226 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6227
6228 static int
6229 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6230 struct regcache *regs,
6231 arm_displaced_step_copy_insn_closure *dsc)
6232 {
6233 int load = bit (insn, 20);
6234 int user = bit (insn, 22);
6235 int increment = bit (insn, 23);
6236 int before = bit (insn, 24);
6237 int writeback = bit (insn, 21);
6238 int rn = bits (insn, 16, 19);
6239
6240 /* Block transfers which don't mention PC can be run directly
6241 out-of-line. */
6242 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6243 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6244
6245 if (rn == ARM_PC_REGNUM)
6246 {
6247 warning (_("displaced: Unpredictable LDM or STM with "
6248 "base register r15"));
6249 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6250 }
6251
6252 displaced_debug_printf ("copying block transfer insn %.8lx",
6253 (unsigned long) insn);
6254
6255 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6256 dsc->u.block.rn = rn;
6257
6258 dsc->u.block.load = load;
6259 dsc->u.block.user = user;
6260 dsc->u.block.increment = increment;
6261 dsc->u.block.before = before;
6262 dsc->u.block.writeback = writeback;
6263 dsc->u.block.cond = bits (insn, 28, 31);
6264
6265 dsc->u.block.regmask = insn & 0xffff;
6266
6267 if (load)
6268 {
6269 if ((insn & 0xffff) == 0xffff)
6270 {
6271 /* LDM with a fully-populated register list. This case is
6272 particularly tricky. Implement for now by fully emulating the
6273 instruction (which might not behave perfectly in all cases, but
6274 these instructions should be rare enough for that not to matter
6275 too much). */
6276 dsc->modinsn[0] = ARM_NOP;
6277
6278 dsc->cleanup = &cleanup_block_load_all;
6279 }
6280 else
6281 {
6282 /* LDM of a list of registers which includes PC. Implement by
6283 rewriting the list of registers to be transferred into a
6284 contiguous chunk r0...rX before doing the transfer, then shuffling
6285 registers into the correct places in the cleanup routine. */
6286 unsigned int regmask = insn & 0xffff;
6287 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6288 unsigned int i;
6289
6290 for (i = 0; i < num_in_list; i++)
6291 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6292
6293 /* Writeback makes things complicated. We need to avoid clobbering
6294 the base register with one of the registers in our modified
6295 register list, but just using a different register can't work in
6296 all cases, e.g.:
6297
6298 ldm r14!, {r0-r13,pc}
6299
6300 which would need to be rewritten as:
6301
6302 ldm rN!, {r0-r14}
6303
6304 but that can't work, because there's no free register for N.
6305
6306 Solve this by turning off the writeback bit, and emulating
6307 writeback manually in the cleanup routine. */
6308
6309 if (writeback)
6310 insn &= ~(1 << 21);
6311
6312 new_regmask = (1 << num_in_list) - 1;
6313
6314 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6315 "%.4x, modified list %.4x",
6316 rn, writeback ? "!" : "",
6317 (int) insn & 0xffff, new_regmask);
6318
6319 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6320
6321 dsc->cleanup = &cleanup_block_load_pc;
6322 }
6323 }
6324 else
6325 {
6326 /* STM of a list of registers which includes PC. Run the instruction
6327 as-is, but out of line: this will store the wrong value for the PC,
6328 so we must manually fix up the memory in the cleanup routine.
6329 Doing things this way has the advantage that we can auto-detect
6330 the offset of the PC write (which is architecture-dependent) in
6331 the cleanup routine. */
6332 dsc->modinsn[0] = insn;
6333
6334 dsc->cleanup = &cleanup_block_store_pc;
6335 }
6336
6337 return 0;
6338 }
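/* Illustrative example (added for exposition): "ldm r7!, {r3, pc}" has two
   registers in its list, so new_regmask is 0x3 and the copied instruction
   becomes "ldm r7, {r0, r1}" with the writeback bit cleared.  After the copy
   runs, cleanup_block_load_pc moves r1 into the PC and r0 into r3, and then
   performs the suppressed writeback by hand, setting r7 to xfer_addr + 8.  */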
6339
6340 static int
6341 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6342 struct regcache *regs,
6343 arm_displaced_step_copy_insn_closure *dsc)
6344 {
6345 int rn = bits (insn1, 0, 3);
6346 int load = bit (insn1, 4);
6347 int writeback = bit (insn1, 5);
6348
6349 /* Block transfers which don't mention PC can be run directly
6350 out-of-line. */
6351 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6352 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6353
6354 if (rn == ARM_PC_REGNUM)
6355 {
6356 warning (_("displaced: Unpredictable LDM or STM with "
6357 "base register r15"));
6358 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6359 "unpredictable ldm/stm", dsc);
6360 }
6361
6362 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6363 insn1, insn2);
6364
6365 /* Clear bit 13, since it should always be zero. */
6366 dsc->u.block.regmask = (insn2 & 0xdfff);
6367 dsc->u.block.rn = rn;
6368
6369 dsc->u.block.load = load;
6370 dsc->u.block.user = 0;
6371 dsc->u.block.increment = bit (insn1, 7);
6372 dsc->u.block.before = bit (insn1, 8);
6373 dsc->u.block.writeback = writeback;
6374 dsc->u.block.cond = INST_AL;
6375 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6376
6377 if (load)
6378 {
6379 if (dsc->u.block.regmask == 0xffff)
6380 {
6381 /* This cannot happen: bit 13 was cleared above, so the mask is never 0xffff. */
6382 gdb_assert (0);
6383 }
6384 else
6385 {
6386 unsigned int regmask = dsc->u.block.regmask;
6387 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6388 unsigned int i;
6389
6390 for (i = 0; i < num_in_list; i++)
6391 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6392
6393 if (writeback)
6394 insn1 &= ~(1 << 5);
6395
6396 new_regmask = (1 << num_in_list) - 1;
6397
6398 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6399 "%.4x, modified list %.4x",
6400 rn, writeback ? "!" : "",
6401 (int) dsc->u.block.regmask, new_regmask);
6402
6403 dsc->modinsn[0] = insn1;
6404 dsc->modinsn[1] = (new_regmask & 0xffff);
6405 dsc->numinsns = 2;
6406
6407 dsc->cleanup = &cleanup_block_load_pc;
6408 }
6409 }
6410 else
6411 {
6412 dsc->modinsn[0] = insn1;
6413 dsc->modinsn[1] = insn2;
6414 dsc->numinsns = 2;
6415 dsc->cleanup = &cleanup_block_store_pc;
6416 }
6417 return 0;
6418 }
6419
6420 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6421 This is used to avoid a dependency on BFD's bfd_endian enum. */
6422
6423 ULONGEST
6424 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6425 int byte_order)
6426 {
6427 return read_memory_unsigned_integer (memaddr, len,
6428 (enum bfd_endian) byte_order);
6429 }
6430
6431 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6432
6433 CORE_ADDR
6434 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6435 CORE_ADDR val)
6436 {
6437 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6438 }
6439
6440 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6441
6442 static CORE_ADDR
6443 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6444 {
6445 return 0;
6446 }
6447
6448 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6449
6450 int
6451 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6452 {
6453 return arm_is_thumb (self->regcache);
6454 }
6455
6456 /* single_step() is called just before we want to resume the inferior,
6457 if we want to single-step it but there is no hardware or kernel
6458 single-step support. We find the target of the coming instructions
6459 and breakpoint them. */
6460
6461 std::vector<CORE_ADDR>
6462 arm_software_single_step (struct regcache *regcache)
6463 {
6464 struct gdbarch *gdbarch = regcache->arch ();
6465 struct arm_get_next_pcs next_pcs_ctx;
6466
6467 arm_get_next_pcs_ctor (&next_pcs_ctx,
6468 &arm_get_next_pcs_ops,
6469 gdbarch_byte_order (gdbarch),
6470 gdbarch_byte_order_for_code (gdbarch),
6471 0,
6472 regcache);
6473
6474 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6475
6476 for (CORE_ADDR &pc_ref : next_pcs)
6477 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6478
6479 return next_pcs;
6480 }
6481
6482 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6483 for Linux, where some SVC instructions must be treated specially. */
6484
6485 static void
6486 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6487 arm_displaced_step_copy_insn_closure *dsc)
6488 {
6489 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6490
6491 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6492 (unsigned long) resume_addr);
6493
6494 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6495 }
6496
6497
6498 /* Common copy routine for svc instruction. */
6499
6500 static int
6501 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6502 arm_displaced_step_copy_insn_closure *dsc)
6503 {
6504 /* Preparation: none.
6505 Insn: unmodified svc.
6506 Cleanup: pc <- insn_addr + insn_size. */
6507
6508 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6509 instruction. */
6510 dsc->wrote_to_pc = 1;
6511
6512 /* Allow OS-specific code to override SVC handling. */
6513 if (dsc->u.svc.copy_svc_os)
6514 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6515 else
6516 {
6517 dsc->cleanup = &cleanup_svc;
6518 return 0;
6519 }
6520 }
6521
6522 static int
6523 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6524 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6525 {
6526
6527 displaced_debug_printf ("copying svc insn %.8lx",
6528 (unsigned long) insn);
6529
6530 dsc->modinsn[0] = insn;
6531
6532 return install_svc (gdbarch, regs, dsc);
6533 }
6534
6535 static int
6536 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6537 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6538 {
6539
6540 displaced_debug_printf ("copying svc insn %.4x", insn);
6541
6542 dsc->modinsn[0] = insn;
6543
6544 return install_svc (gdbarch, regs, dsc);
6545 }
6546
6547 /* Copy undefined instructions. */
6548
6549 static int
6550 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6551 arm_displaced_step_copy_insn_closure *dsc)
6552 {
6553 displaced_debug_printf ("copying undefined insn %.8lx",
6554 (unsigned long) insn);
6555
6556 dsc->modinsn[0] = insn;
6557
6558 return 0;
6559 }
6560
6561 static int
6562 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6563 arm_displaced_step_copy_insn_closure *dsc)
6564 {
6565
6566 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6567 (unsigned short) insn1, (unsigned short) insn2);
6568
6569 dsc->modinsn[0] = insn1;
6570 dsc->modinsn[1] = insn2;
6571 dsc->numinsns = 2;
6572
6573 return 0;
6574 }
6575
6576 /* Copy unpredictable instructions. */
6577
6578 static int
6579 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6580 arm_displaced_step_copy_insn_closure *dsc)
6581 {
6582 displaced_debug_printf ("copying unpredictable insn %.8lx",
6583 (unsigned long) insn);
6584
6585 dsc->modinsn[0] = insn;
6586
6587 return 0;
6588 }
6589
6590 /* The decode_* functions are instruction decoding helpers. They mostly follow
6591 the presentation in the ARM ARM. */
6592
6593 static int
6594 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6595 struct regcache *regs,
6596 arm_displaced_step_copy_insn_closure *dsc)
6597 {
6598 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6599 unsigned int rn = bits (insn, 16, 19);
6600
6601 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6602 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6603 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6604 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6605 else if ((op1 & 0x60) == 0x20)
6606 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6607 else if ((op1 & 0x71) == 0x40)
6608 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6609 dsc);
6610 else if ((op1 & 0x77) == 0x41)
6611 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6612 else if ((op1 & 0x77) == 0x45)
6613 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6614 else if ((op1 & 0x77) == 0x51)
6615 {
6616 if (rn != 0xf)
6617 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6618 else
6619 return arm_copy_unpred (gdbarch, insn, dsc);
6620 }
6621 else if ((op1 & 0x77) == 0x55)
6622 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6623 else if (op1 == 0x57)
6624 switch (op2)
6625 {
6626 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6627 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6628 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6629 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6630 default: return arm_copy_unpred (gdbarch, insn, dsc);
6631 }
6632 else if ((op1 & 0x63) == 0x43)
6633 return arm_copy_unpred (gdbarch, insn, dsc);
6634 else if ((op2 & 0x1) == 0x0)
6635 switch (op1 & ~0x80)
6636 {
6637 case 0x61:
6638 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6639 case 0x65:
6640 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6641 case 0x71: case 0x75:
6642 /* pld/pldw reg. */
6643 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6644 case 0x63: case 0x67: case 0x73: case 0x77:
6645 return arm_copy_unpred (gdbarch, insn, dsc);
6646 default:
6647 return arm_copy_undef (gdbarch, insn, dsc);
6648 }
6649 else
6650 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6651 }
6652
6653 static int
6654 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6655 struct regcache *regs,
6656 arm_displaced_step_copy_insn_closure *dsc)
6657 {
6658 if (bit (insn, 27) == 0)
6659 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6660 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6661 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6662 {
6663 case 0x0: case 0x2:
6664 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6665
6666 case 0x1: case 0x3:
6667 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6668
6669 case 0x4: case 0x5: case 0x6: case 0x7:
6670 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6671
6672 case 0x8:
6673 switch ((insn & 0xe00000) >> 21)
6674 {
6675 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6676 /* stc/stc2. */
6677 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6678
6679 case 0x2:
6680 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6681
6682 default:
6683 return arm_copy_undef (gdbarch, insn, dsc);
6684 }
6685
6686 case 0x9:
6687 {
6688 int rn_f = (bits (insn, 16, 19) == 0xf);
6689 switch ((insn & 0xe00000) >> 21)
6690 {
6691 case 0x1: case 0x3:
6692 /* ldc/ldc2 imm (undefined for rn == pc). */
6693 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6694 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6695
6696 case 0x2:
6697 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6698
6699 case 0x4: case 0x5: case 0x6: case 0x7:
6700 /* ldc/ldc2 lit (undefined for rn != pc). */
6701 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6702 : arm_copy_undef (gdbarch, insn, dsc);
6703
6704 default:
6705 return arm_copy_undef (gdbarch, insn, dsc);
6706 }
6707 }
6708
6709 case 0xa:
6710 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6711
6712 case 0xb:
6713 if (bits (insn, 16, 19) == 0xf)
6714 /* ldc/ldc2 lit. */
6715 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6716 else
6717 return arm_copy_undef (gdbarch, insn, dsc);
6718
6719 case 0xc:
6720 if (bit (insn, 4))
6721 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6722 else
6723 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6724
6725 case 0xd:
6726 if (bit (insn, 4))
6727 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6728 else
6729 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6730
6731 default:
6732 return arm_copy_undef (gdbarch, insn, dsc);
6733 }
6734 }
6735
6736 /* Decode miscellaneous instructions in dp/misc encoding space. */
6737
6738 static int
6739 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6740 struct regcache *regs,
6741 arm_displaced_step_copy_insn_closure *dsc)
6742 {
6743 unsigned int op2 = bits (insn, 4, 6);
6744 unsigned int op = bits (insn, 21, 22);
6745
6746 switch (op2)
6747 {
6748 case 0x0:
6749 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6750
6751 case 0x1:
6752 if (op == 0x1) /* bx. */
6753 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6754 else if (op == 0x3)
6755 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6756 else
6757 return arm_copy_undef (gdbarch, insn, dsc);
6758
6759 case 0x2:
6760 if (op == 0x1)
6761 /* Not really supported. */
6762 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6763 else
6764 return arm_copy_undef (gdbarch, insn, dsc);
6765
6766 case 0x3:
6767 if (op == 0x1)
6768 return arm_copy_bx_blx_reg (gdbarch, insn,
6769 regs, dsc); /* blx register. */
6770 else
6771 return arm_copy_undef (gdbarch, insn, dsc);
6772
6773 case 0x5:
6774 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6775
6776 case 0x7:
6777 if (op == 0x1)
6778 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6779 else if (op == 0x3)
6780 /* Not really supported. */
6781 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6782 /* Fall through. */
6783
6784 default:
6785 return arm_copy_undef (gdbarch, insn, dsc);
6786 }
6787 }
6788
6789 static int
6790 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6791 struct regcache *regs,
6792 arm_displaced_step_copy_insn_closure *dsc)
6793 {
6794 if (bit (insn, 25))
6795 switch (bits (insn, 20, 24))
6796 {
6797 case 0x10:
6798 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6799
6800 case 0x14:
6801 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6802
6803 case 0x12: case 0x16:
6804 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6805
6806 default:
6807 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6808 }
6809 else
6810 {
6811 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6812
6813 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6814 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6815 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6816 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6817 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6818 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6819 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6820 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6821 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6822 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6823 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6824 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6825 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6826 /* 2nd arg means "unprivileged". */
6827 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6828 dsc);
6829 }
6830
6831 /* Should be unreachable. */
6832 return 1;
6833 }
6834
6835 static int
6836 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6837 struct regcache *regs,
6838 arm_displaced_step_copy_insn_closure *dsc)
6839 {
6840 int a = bit (insn, 25), b = bit (insn, 4);
6841 uint32_t op1 = bits (insn, 20, 24);
6842
6843 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6844 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6845 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6846 else if ((!a && (op1 & 0x17) == 0x02)
6847 || (a && (op1 & 0x17) == 0x02 && !b))
6848 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6849 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6850 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6851 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6852 else if ((!a && (op1 & 0x17) == 0x03)
6853 || (a && (op1 & 0x17) == 0x03 && !b))
6854 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6855 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6856 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6857 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6858 else if ((!a && (op1 & 0x17) == 0x06)
6859 || (a && (op1 & 0x17) == 0x06 && !b))
6860 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6861 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6862 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6863 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6864 else if ((!a && (op1 & 0x17) == 0x07)
6865 || (a && (op1 & 0x17) == 0x07 && !b))
6866 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6867
6868 /* Should be unreachable. */
6869 return 1;
6870 }
6871
6872 static int
6873 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6874 arm_displaced_step_copy_insn_closure *dsc)
6875 {
6876 switch (bits (insn, 20, 24))
6877 {
6878 case 0x00: case 0x01: case 0x02: case 0x03:
6879 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6880
6881 case 0x04: case 0x05: case 0x06: case 0x07:
6882 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6883
6884 case 0x08: case 0x09: case 0x0a: case 0x0b:
6885 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6886 return arm_copy_unmodified (gdbarch, insn,
6887 "decode/pack/unpack/saturate/reverse", dsc);
6888
6889 case 0x18:
6890 if (bits (insn, 5, 7) == 0) /* op2. */
6891 {
6892 if (bits (insn, 12, 15) == 0xf)
6893 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6894 else
6895 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6896 }
6897 else
6898 return arm_copy_undef (gdbarch, insn, dsc);
6899
6900 case 0x1a: case 0x1b:
6901 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6902 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6903 else
6904 return arm_copy_undef (gdbarch, insn, dsc);
6905
6906 case 0x1c: case 0x1d:
6907 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6908 {
6909 if (bits (insn, 0, 3) == 0xf)
6910 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6911 else
6912 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6913 }
6914 else
6915 return arm_copy_undef (gdbarch, insn, dsc);
6916
6917 case 0x1e: case 0x1f:
6918 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6919 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6920 else
6921 return arm_copy_undef (gdbarch, insn, dsc);
6922 }
6923
6924 /* Should be unreachable. */
6925 return 1;
6926 }
6927
6928 static int
6929 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6930 struct regcache *regs,
6931 arm_displaced_step_copy_insn_closure *dsc)
6932 {
6933 if (bit (insn, 25))
6934 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6935 else
6936 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6937 }
6938
6939 static int
6940 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6941 struct regcache *regs,
6942 arm_displaced_step_copy_insn_closure *dsc)
6943 {
6944 unsigned int opcode = bits (insn, 20, 24);
6945
6946 switch (opcode)
6947 {
6948 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6949 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6950
6951 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6952 case 0x12: case 0x16:
6953 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6954
6955 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6956 case 0x13: case 0x17:
6957 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6958
6959 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6960 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6961 /* Note: no writeback for these instructions. Bit 25 will always be
6962 zero though (via caller), so the following works OK. */
6963 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6964 }
6965
6966 /* Should be unreachable. */
6967 return 1;
6968 }
6969
6970 /* Decode shifted register instructions. */
6971
6972 static int
6973 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6974 uint16_t insn2, struct regcache *regs,
6975 arm_displaced_step_copy_insn_closure *dsc)
6976 {
6977 /* PC is only allowed to be used in instruction MOV. */
6978
6979 unsigned int op = bits (insn1, 5, 8);
6980 unsigned int rn = bits (insn1, 0, 3);
6981
6982 if (op == 0x2 && rn == 0xf) /* MOV */
6983 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6984 else
6985 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6986 "dp (shift reg)", dsc);
6987 }
6988
6989
6990 /* Decode extension register load/store. Exactly the same as
6991 arm_decode_ext_reg_ld_st. */
6992
6993 static int
6994 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6995 uint16_t insn2, struct regcache *regs,
6996 arm_displaced_step_copy_insn_closure *dsc)
6997 {
6998 unsigned int opcode = bits (insn1, 4, 8);
6999
7000 switch (opcode)
7001 {
7002 case 0x04: case 0x05:
7003 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7004 "vfp/neon vmov", dsc);
7005
7006 case 0x08: case 0x0c: /* 01x00 */
7007 case 0x0a: case 0x0e: /* 01x10 */
7008 case 0x12: case 0x16: /* 10x10 */
7009 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7010 "vfp/neon vstm/vpush", dsc);
7011
7012 case 0x09: case 0x0d: /* 01x01 */
7013 case 0x0b: case 0x0f: /* 01x11 */
7014 case 0x13: case 0x17: /* 10x11 */
7015 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7016 "vfp/neon vldm/vpop", dsc);
7017
7018 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7019 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7020 "vstr", dsc);
7021 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7022 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7023 }
7024
7025 /* Should be unreachable. */
7026 return 1;
7027 }
7028
7029 static int
7030 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7031 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7032 {
7033 unsigned int op1 = bits (insn, 20, 25);
7034 int op = bit (insn, 4);
7035 unsigned int coproc = bits (insn, 8, 11);
7036
7037 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7038 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7039 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7040 && (coproc & 0xe) != 0xa)
7041 /* stc/stc2. */
7042 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7043 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7044 && (coproc & 0xe) != 0xa)
7045 /* ldc/ldc2 imm/lit. */
7046 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7047 else if ((op1 & 0x3e) == 0x00)
7048 return arm_copy_undef (gdbarch, insn, dsc);
7049 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7050 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7051 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7052 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7053 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7054 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7055 else if ((op1 & 0x30) == 0x20 && !op)
7056 {
7057 if ((coproc & 0xe) == 0xa)
7058 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7059 else
7060 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7061 }
7062 else if ((op1 & 0x30) == 0x20 && op)
7063 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7064 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7065 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7066 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7067 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7068 else if ((op1 & 0x30) == 0x30)
7069 return arm_copy_svc (gdbarch, insn, regs, dsc);
7070 else
7071 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7072 }
7073
7074 static int
7075 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7076 uint16_t insn2, struct regcache *regs,
7077 arm_displaced_step_copy_insn_closure *dsc)
7078 {
7079 unsigned int coproc = bits (insn2, 8, 11);
7080 unsigned int bit_5_8 = bits (insn1, 5, 8);
7081 unsigned int bit_9 = bit (insn1, 9);
7082 unsigned int bit_4 = bit (insn1, 4);
7083
7084 if (bit_9 == 0)
7085 {
7086 if (bit_5_8 == 2)
7087 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7088 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7089 dsc);
7090 else if (bit_5_8 == 0) /* UNDEFINED. */
7091 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7092 else
7093 {
7094 /* coproc is 101x: SIMD/VFP extension register load/store. */
7095 if ((coproc & 0xe) == 0xa)
7096 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7097 dsc);
7098 else /* coproc is not 101x. */
7099 {
7100 if (bit_4 == 0) /* STC/STC2. */
7101 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7102 "stc/stc2", dsc);
7103 else /* LDC/LDC2 {literal, immediate}. */
7104 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7105 regs, dsc);
7106 }
7107 }
7108 }
7109 else
7110 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7111
7112 return 0;
7113 }
7114
7115 static void
7116 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7117 arm_displaced_step_copy_insn_closure *dsc, int rd)
7118 {
7119 /* ADR Rd, #imm
7120
7121 Rewrite as:
7122
7123 Preparation: Rd <- PC
7124 Insn: ADD Rd, #imm
7125 Cleanup: Null.
7126 */
7127
7128 /* Rd <- PC */
7129 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7130 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7131 }
7132
7133 static int
7134 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7135 arm_displaced_step_copy_insn_closure *dsc,
7136 int rd, unsigned int imm)
7137 {
7138
7139 /* Encoding T2: ADDS Rd, #imm */
7140 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7141
7142 install_pc_relative (gdbarch, regs, dsc, rd);
7143
7144 return 0;
7145 }
7146
7147 static int
7148 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7149 struct regcache *regs,
7150 arm_displaced_step_copy_insn_closure *dsc)
7151 {
7152 unsigned int rd = bits (insn, 8, 10);
7153 unsigned int imm8 = bits (insn, 0, 7);
7154
7155 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7156 rd, imm8, insn);
7157
7158 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7159 }
7160
7161 static int
7162 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7163 uint16_t insn2, struct regcache *regs,
7164 arm_displaced_step_copy_insn_closure *dsc)
7165 {
7166 unsigned int rd = bits (insn2, 8, 11);
7167 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7168 extract the raw immediate encoding rather than computing its value. When
7169 generating the ADD or SUB instruction, the immediate can then be ORed
7170 straight into the encoding. */
7171 unsigned int imm_3_8 = insn2 & 0x70ff;
7172 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7173
7174 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7175 rd, imm_i, imm_3_8, insn1, insn2);
7176
7177 if (bit (insn1, 7)) /* ADR encoding T2 (subtracting form). */
7178 {
7179 /* Encoding T3: SUB Rd, Rd, #imm */
7180 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7181 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7182 }
7183 else /* Encoding T3 */
7184 {
7185 /* Encoding T3: ADD Rd, Rd, #imm */
7186 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7187 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7188 }
7189 dsc->numinsns = 2;
7190
7191 install_pc_relative (gdbarch, regs, dsc, rd);
7192
7193 return 0;
7194 }
7195
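/* Copy a 16-bit Thumb LDR (literal), i.e. a PC-relative load, for displaced
stepping. */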
7196 static int
7197 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7198 struct regcache *regs,
7199 arm_displaced_step_copy_insn_closure *dsc)
7200 {
7201 unsigned int rt = bits (insn1, 8, 10);
7202 unsigned int pc;
7203 int imm8 = (bits (insn1, 0, 7) << 2);
7204
7205 /* LDR Rd, [PC, #imm8]
7206
7207 Rewrite as:
7208
7209 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7210
7211 Insn: LDR R0, [R2, R3];
7212 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7213
7214 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7215
7216 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7217 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7218 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7219 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7220 /* The assembler calculates the required value of the offset from the
7221 Align(PC,4) value of this instruction to the label. */
7222 pc = pc & 0xfffffffc;
7223
7224 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7225 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7226
7227 dsc->rd = rt;
7228 dsc->u.ldst.xfersize = 4;
7229 dsc->u.ldst.rn = 0;
7230 dsc->u.ldst.immed = 0;
7231 dsc->u.ldst.writeback = 0;
7232 dsc->u.ldst.restore_r4 = 0;
7233
7234 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7235
7236 dsc->cleanup = &cleanup_load;
7237
7238 return 0;
7239 }
7240
7241 /* Copy Thumb cbnz/cbz instruction. */
7242
7243 static int
7244 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7245 struct regcache *regs,
7246 arm_displaced_step_copy_insn_closure *dsc)
7247 {
7248 int non_zero = bit (insn1, 11);
7249 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7250 CORE_ADDR from = dsc->insn_addr;
7251 int rn = bits (insn1, 0, 2);
7252 int rn_val = displaced_read_reg (regs, dsc, rn);
7253
7254 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7255 /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7256 true, set the cond field to INST_AL so cleanup_branch knows the branch is
7257 taken; otherwise leave it false and cleanup_branch will do nothing. */
7258 if (dsc->u.branch.cond)
7259 {
7260 dsc->u.branch.cond = INST_AL;
7261 dsc->u.branch.dest = from + 4 + imm5;
7262 }
7263 else
7264 dsc->u.branch.dest = from + 2;
7265
7266 dsc->u.branch.link = 0;
7267 dsc->u.branch.exchange = 0;
7268
7269 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7270 non_zero ? "cbnz" : "cbz",
7271 rn, rn_val, insn1, dsc->u.branch.dest);
7272
7273 dsc->modinsn[0] = THUMB_NOP;
7274
7275 dsc->cleanup = &cleanup_branch;
7276 return 0;
7277 }
7278
7279 /* Copy Table Branch Byte/Halfword (TBB/TBH). */
7280 static int
7281 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7282 uint16_t insn2, struct regcache *regs,
7283 arm_displaced_step_copy_insn_closure *dsc)
7284 {
7285 ULONGEST rn_val, rm_val;
7286 int is_tbh = bit (insn2, 4);
7287 CORE_ADDR halfwords = 0;
7288 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7289
7290 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7291 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7292
7293 if (is_tbh)
7294 {
7295 gdb_byte buf[2];
7296
7297 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7298 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7299 }
7300 else
7301 {
7302 gdb_byte buf[1];
7303
7304 target_read_memory (rn_val + rm_val, buf, 1);
7305 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7306 }
7307
7308 displaced_debug_printf ("%s base 0x%x offset 0x%x halfwords 0x%x",
7309 is_tbh ? "tbh" : "tbb",
7310 (unsigned int) rn_val, (unsigned int) rm_val,
7311 (unsigned int) halfwords);
7312
7313 dsc->u.branch.cond = INST_AL;
7314 dsc->u.branch.link = 0;
7315 dsc->u.branch.exchange = 0;
7316 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7317
7318 dsc->cleanup = &cleanup_branch;
7319
7320 return 0;
7321 }
7322
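/* Cleanup for a copied POP with a full register list: move the loaded PC
value out of r7 and restore the saved r8. */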
7323 static void
7324 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7325 arm_displaced_step_copy_insn_closure *dsc)
7326 {
7327 /* PC <- r7 */
7328 int val = displaced_read_reg (regs, dsc, 7);
7329 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7330
7331 /* r7 <- r8 */
7332 val = displaced_read_reg (regs, dsc, 8);
7333 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7334
7335 /* r8 <- tmp[0] */
7336 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7337
7338 }
7339
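/* Copy a 16-bit Thumb POP instruction whose register list includes the
PC. */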
7340 static int
7341 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7342 struct regcache *regs,
7343 arm_displaced_step_copy_insn_closure *dsc)
7344 {
7345 dsc->u.block.regmask = insn1 & 0x00ff;
7346
7347 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7348 to:
7349
7350 (1) register list is full, that is, r0-r7 are used.
7351 Prepare: tmp[0] <- r8
7352
7353 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7354 MOV r8, r7; Move value of r7 to r8;
7355 POP {r7}; Store PC value into r7.
7356
7357 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7358
7359 (2) register list is not full, supposing there are N registers in
7360 register list (except PC, 0 <= N <= 7).
7361 Prepare: for each i, 0 - N, tmp[i] <- ri.
7362
7363 POP {r0, r1, ...., rN};
7364
7365 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7366 from tmp[] properly.
7367 */
7368 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7369 dsc->u.block.regmask, insn1);
7370
7371 if (dsc->u.block.regmask == 0xff)
7372 {
7373 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7374
7375 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7376 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7377 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7378
7379 dsc->numinsns = 3;
7380 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7381 }
7382 else
7383 {
7384 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7385 unsigned int i;
7386 unsigned int new_regmask;
7387
7388 for (i = 0; i < num_in_list + 1; i++)
7389 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7390
7391 new_regmask = (1 << (num_in_list + 1)) - 1;
7392
7393 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7394 "modified list %.4x",
7395 (int) dsc->u.block.regmask, new_regmask);
7396
7397 dsc->u.block.regmask |= 0x8000;
7398 dsc->u.block.writeback = 0;
7399 dsc->u.block.cond = INST_AL;
7400
7401 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7402
7403 dsc->cleanup = &cleanup_block_load_pc;
7404 }
7405
7406 return 0;
7407 }
7408
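/* Decode a 16-bit Thumb instruction for displaced stepping and copy it into
the scratch space, dispatching on its major opcode fields. */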
7409 static void
7410 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7411 struct regcache *regs,
7412 arm_displaced_step_copy_insn_closure *dsc)
7413 {
7414 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7415 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7416 int err = 0;
7417
7418 /* 16-bit thumb instructions. */
7419 switch (op_bit_12_15)
7420 {
7421 /* Shift (immediate), add, subtract, move and compare. */
7422 case 0: case 1: case 2: case 3:
7423 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7424 "shift/add/sub/mov/cmp",
7425 dsc);
7426 break;
7427 case 4:
7428 switch (op_bit_10_11)
7429 {
7430 case 0: /* Data-processing */
7431 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7432 "data-processing",
7433 dsc);
7434 break;
7435 case 1: /* Special data instructions and branch and exchange. */
7436 {
7437 unsigned short op = bits (insn1, 7, 9);
7438 if (op == 6 || op == 7) /* BX or BLX */
7439 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7440 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7441 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7442 else
7443 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7444 dsc);
7445 }
7446 break;
7447 default: /* LDR (literal) */
7448 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7449 }
7450 break;
7451 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7452 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7453 break;
7454 case 10:
7455 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7456 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7457 else /* Generate SP-relative address */
7458 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7459 break;
7460 case 11: /* Misc 16-bit instructions */
7461 {
7462 switch (bits (insn1, 8, 11))
7463 {
7464 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7465 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7466 break;
7467 case 12: case 13: /* POP */
7468 if (bit (insn1, 8)) /* PC is in register list. */
7469 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7470 else
7471 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7472 break;
7473 case 15: /* If-Then, and hints */
7474 if (bits (insn1, 0, 3))
7475 /* If-Then makes up to four following instructions conditional.
7476 The IT instruction itself is not conditional, so handle it as a
7477 common unmodified instruction. */
7478 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7479 dsc);
7480 else
7481 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7482 break;
7483 default:
7484 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7485 }
7486 }
7487 break;
7488 case 12:
7489 if (op_bit_10_11 < 2) /* Store multiple registers */
7490 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7491 else /* Load multiple registers */
7492 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7493 break;
7494 case 13: /* Conditional branch and supervisor call */
7495 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7496 err = thumb_copy_b (gdbarch, insn1, dsc);
7497 else
7498 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7499 break;
7500 case 14: /* Unconditional branch */
7501 err = thumb_copy_b (gdbarch, insn1, dsc);
7502 break;
7503 default:
7504 err = 1;
7505 }
7506
7507 if (err)
7508 internal_error (__FILE__, __LINE__,
7509 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7510 }
7511
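/* Decode 32-bit Thumb-2 load instructions and memory hints (PLD/PLI) for
displaced stepping. */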
7512 static int
7513 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7514 uint16_t insn1, uint16_t insn2,
7515 struct regcache *regs,
7516 arm_displaced_step_copy_insn_closure *dsc)
7517 {
7518 int rt = bits (insn2, 12, 15);
7519 int rn = bits (insn1, 0, 3);
7520 int op1 = bits (insn1, 7, 8);
7521
7522 switch (bits (insn1, 5, 6))
7523 {
7524 case 0: /* Load byte and memory hints */
7525 if (rt == 0xf) /* PLD/PLI */
7526 {
7527 if (rn == 0xf)
7528 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7529 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7530 else
7531 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7532 "pli/pld", dsc);
7533 }
7534 else
7535 {
7536 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7537 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7538 1);
7539 else
7540 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7541 "ldrb{reg, immediate}/ldrbt",
7542 dsc);
7543 }
7544
7545 break;
7546 case 1: /* Load halfword and memory hints. */
7547 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7548 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7549 "pld/unalloc memhint", dsc);
7550 else
7551 {
7552 if (rn == 0xf)
7553 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7554 2);
7555 else
7556 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7557 "ldrh/ldrht", dsc);
7558 }
7559 break;
7560 case 2: /* Load word */
7561 {
7562 int insn2_bit_8_11 = bits (insn2, 8, 11);
7563
7564 if (rn == 0xf)
7565 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7566 else if (op1 == 0x1) /* Encoding T3 */
7567 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7568 0, 1);
7569 else /* op1 == 0x0 */
7570 {
7571 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7572 /* LDR (immediate) */
7573 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7574 dsc, bit (insn2, 8), 1);
7575 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7576 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7577 "ldrt", dsc);
7578 else
7579 /* LDR (register) */
7580 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7581 dsc, 0, 0);
7582 }
7583 break;
7584 }
7585 default:
7586 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7587 break;
7588 }
7589 return 0;
7590 }
7591
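/* Decode a 32-bit Thumb-2 instruction INSN1:INSN2 for displaced stepping and
copy it into the scratch space. */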
7592 static void
7593 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7594 uint16_t insn2, struct regcache *regs,
7595 arm_displaced_step_copy_insn_closure *dsc)
7596 {
7597 int err = 0;
7598 unsigned short op = bit (insn2, 15);
7599 unsigned int op1 = bits (insn1, 11, 12);
7600
7601 switch (op1)
7602 {
7603 case 1:
7604 {
7605 switch (bits (insn1, 9, 10))
7606 {
7607 case 0:
7608 if (bit (insn1, 6))
7609 {
7610 /* Load/store {dual, exclusive}, table branch. */
7611 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7612 && bits (insn2, 5, 7) == 0)
7613 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7614 dsc);
7615 else
7616 /* The PC is not allowed to be used in load/store {dual, exclusive}
7617 instructions. */
7618 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7619 "load/store dual/ex", dsc);
7620 }
7621 else /* load/store multiple */
7622 {
7623 switch (bits (insn1, 7, 8))
7624 {
7625 case 0: case 3: /* SRS, RFE */
7626 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7627 "srs/rfe", dsc);
7628 break;
7629 case 1: case 2: /* LDM/STM/PUSH/POP */
7630 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7631 break;
7632 }
7633 }
7634 break;
7635
7636 case 1:
7637 /* Data-processing (shift register). */
7638 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7639 dsc);
7640 break;
7641 default: /* Coprocessor instructions. */
7642 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7643 break;
7644 }
7645 break;
7646 }
7647 case 2: /* op1 = 2 */
7648 if (op) /* Branch and misc control. */
7649 {
7650 if (bit (insn2, 14) /* BLX/BL */
7651 || bit (insn2, 12) /* Unconditional branch */
7652 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7653 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7654 else
7655 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7656 "misc ctrl", dsc);
7657 }
7658 else
7659 {
7660 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7661 {
7662 int dp_op = bits (insn1, 4, 8);
7663 int rn = bits (insn1, 0, 3);
7664 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7665 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7666 regs, dsc);
7667 else
7668 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7669 "dp/pb", dsc);
7670 }
7671 else /* Data processing (modified immediate) */
7672 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7673 "dp/mi", dsc);
7674 }
7675 break;
7676 case 3: /* op1 = 3 */
7677 switch (bits (insn1, 9, 10))
7678 {
7679 case 0:
7680 if (bit (insn1, 4))
7681 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7682 regs, dsc);
7683 else /* NEON Load/Store and Store single data item */
7684 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7685 "neon elt/struct load/store",
7686 dsc);
7687 break;
7688 case 1: /* op1 = 3, bits (9, 10) == 1 */
7689 switch (bits (insn1, 7, 8))
7690 {
7691 case 0: case 1: /* Data processing (register) */
7692 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7693 "dp(reg)", dsc);
7694 break;
7695 case 2: /* Multiply and absolute difference */
7696 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7697 "mul/mua/diff", dsc);
7698 break;
7699 case 3: /* Long multiply and divide */
7700 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7701 "lmul/lmua", dsc);
7702 break;
7703 }
7704 break;
7705 default: /* Coprocessor instructions */
7706 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7707 break;
7708 }
7709 break;
7710 default:
7711 err = 1;
7712 }
7713
7714 if (err)
7715 internal_error (__FILE__, __LINE__,
7716 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7717
7718 }
7719
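/* Read the Thumb instruction at FROM and copy it, as a 16-bit or 32-bit
instruction, for displaced stepping. */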
7720 static void
7721 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7722 struct regcache *regs,
7723 arm_displaced_step_copy_insn_closure *dsc)
7724 {
7725 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7726 uint16_t insn1
7727 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7728
7729 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7730 insn1, (unsigned long) from);
7731
7732 dsc->is_thumb = 1;
7733 dsc->insn_size = thumb_insn_size (insn1);
7734 if (thumb_insn_size (insn1) == 4)
7735 {
7736 uint16_t insn2
7737 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7738 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7739 }
7740 else
7741 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7742 }
7743
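/* Set up the displaced stepping closure DSC and copy the instruction at
FROM, in either ARM or Thumb state, for execution in the scratch space at
TO. */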
7744 void
7745 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7746 CORE_ADDR to, struct regcache *regs,
7747 arm_displaced_step_copy_insn_closure *dsc)
7748 {
7749 int err = 0;
7750 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7751 uint32_t insn;
7752
7753 /* Most displaced instructions use a 1-instruction scratch space, so set this
7754 here and override below if/when necessary. */
7755 dsc->numinsns = 1;
7756 dsc->insn_addr = from;
7757 dsc->scratch_base = to;
7758 dsc->cleanup = NULL;
7759 dsc->wrote_to_pc = 0;
7760
7761 if (!displaced_in_arm_mode (regs))
7762 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7763
7764 dsc->is_thumb = 0;
7765 dsc->insn_size = 4;
7766 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7767 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7768 (unsigned long) insn, (unsigned long) from);
7769
7770 if ((insn & 0xf0000000) == 0xf0000000)
7771 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7772 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7773 {
7774 case 0x0: case 0x1: case 0x2: case 0x3:
7775 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7776 break;
7777
7778 case 0x4: case 0x5: case 0x6:
7779 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7780 break;
7781
7782 case 0x7:
7783 err = arm_decode_media (gdbarch, insn, dsc);
7784 break;
7785
7786 case 0x8: case 0x9: case 0xa: case 0xb:
7787 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7788 break;
7789
7790 case 0xc: case 0xd: case 0xe: case 0xf:
7791 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7792 break;
7793 }
7794
7795 if (err)
7796 internal_error (__FILE__, __LINE__,
7797 _("arm_process_displaced_insn: Instruction decode error"));
7798 }
7799
7800 /* Actually set up the scratch space for a displaced instruction. */
7801
7802 void
7803 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7804 CORE_ADDR to,
7805 arm_displaced_step_copy_insn_closure *dsc)
7806 {
7807 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7808 unsigned int i, len, offset;
7809 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7810 int size = dsc->is_thumb ? 2 : 4;
7811 const gdb_byte *bkp_insn;
7812
7813 offset = 0;
7814 /* Poke modified instruction(s). */
7815 for (i = 0; i < dsc->numinsns; i++)
7816 {
7817 if (size == 4)
7818 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7819 dsc->modinsn[i], (unsigned long) to + offset);
7820 else if (size == 2)
7821 displaced_debug_printf ("writing insn %.4x at %.8lx",
7822 (unsigned short) dsc->modinsn[i],
7823 (unsigned long) to + offset);
7824
7825 write_memory_unsigned_integer (to + offset, size,
7826 byte_order_for_code,
7827 dsc->modinsn[i]);
7828 offset += size;
7829 }
7830
7831 /* Choose the correct breakpoint instruction. */
7832 if (dsc->is_thumb)
7833 {
7834 bkp_insn = tdep->thumb_breakpoint;
7835 len = tdep->thumb_breakpoint_size;
7836 }
7837 else
7838 {
7839 bkp_insn = tdep->arm_breakpoint;
7840 len = tdep->arm_breakpoint_size;
7841 }
7842
7843 /* Put breakpoint afterwards. */
7844 write_memory (to + offset, bkp_insn, len);
7845
7846 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
7847 paddress (gdbarch, to));
7848 }
7849
7850 /* Entry point for cleaning things up after a displaced instruction has been
7851 single-stepped. */
7852
7853 void
7854 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7855 struct displaced_step_copy_insn_closure *dsc_,
7856 CORE_ADDR from, CORE_ADDR to,
7857 struct regcache *regs)
7858 {
7859 arm_displaced_step_copy_insn_closure *dsc
7860 = (arm_displaced_step_copy_insn_closure *) dsc_;
7861
7862 if (dsc->cleanup)
7863 dsc->cleanup (gdbarch, regs, dsc);
7864
7865 if (!dsc->wrote_to_pc)
7866 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7867 dsc->insn_addr + dsc->insn_size);
7868
7869 }
7870
7871 #include "bfd-in2.h"
7872 #include "libcoff.h"
7873
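/* Disassembler wrapper: select ARM or Thumb disassembly for MEMADDR based on
the symbol table information. */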
7874 static int
7875 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7876 {
7877 gdb_disassembler *di
7878 = static_cast<gdb_disassembler *>(info->application_data);
7879 struct gdbarch *gdbarch = di->arch ();
7880
7881 if (arm_pc_is_thumb (gdbarch, memaddr))
7882 {
7883 static asymbol *asym;
7884 static combined_entry_type ce;
7885 static struct coff_symbol_struct csym;
7886 static struct bfd fake_bfd;
7887 static bfd_target fake_target;
7888
7889 if (csym.native == NULL)
7890 {
7891 /* Create a fake symbol vector containing a Thumb symbol.
7892 This is solely so that the code in print_insn_little_arm()
7893 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7894 the presence of a Thumb symbol and switch to decoding
7895 Thumb instructions. */
7896
7897 fake_target.flavour = bfd_target_coff_flavour;
7898 fake_bfd.xvec = &fake_target;
7899 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7900 csym.native = &ce;
7901 csym.symbol.the_bfd = &fake_bfd;
7902 csym.symbol.name = "fake";
7903 asym = (asymbol *) & csym;
7904 }
7905
7906 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7907 info->symbols = &asym;
7908 }
7909 else
7910 info->symbols = NULL;
7911
7912 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
7913 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
7914 opcodes/arm-dis.c:print_insn resets info->mach, which would trigger
7915 the assert on the mismatch of info->mach and
7916 bfd_get_mach (current_program_space->exec_bfd ()) in
7917 default_print_insn. */
7918 if (current_program_space->exec_bfd () != NULL
7919 && (current_program_space->exec_bfd ()->arch_info
7920 == gdbarch_bfd_arch_info (gdbarch)))
7921 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7922
7923 return default_print_insn (memaddr, info);
7924 }
7925
7926 /* The following define instruction sequences that will cause ARM
7927 CPUs to take an undefined instruction trap.  These are used to
7928 signal a breakpoint to GDB.
7929
7930 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7931 modes.  A different instruction is required for each mode.  The ARM
7932 CPUs can also be big or little endian.  Thus four different
7933 instructions are needed to support all cases.
7934
7935 Note: ARMv4 defines several new instructions that will take the
7936 undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
7937 not in fact add the new instructions.  The new undefined
7938 instructions in ARMv4 are all instructions that had no defined
7939 behaviour in earlier chips.  There is no guarantee that they will
7940 raise an exception; they may be treated as NOPs.  In practice, it
7941 may only be safe to rely on instructions matching:
7942
7943 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7944 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7945 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7946
7947 Even this may only be true if the condition predicate is true.  The
7948 following use a condition predicate of ALWAYS so it is always TRUE.
7949
7950 There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
7951 and NetBSD all use a software interrupt rather than an undefined
7952 instruction to force a trap.  This can be handled by the
7953 ABI-specific code during establishment of the gdbarch vector. */
7954
7955 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7956 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7957 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7958 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7959
7960 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7961 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7962 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7963 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7964
7965 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7966
7967 static int
7968 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7969 {
7970 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7971 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7972
7973 if (arm_pc_is_thumb (gdbarch, *pcptr))
7974 {
7975 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7976
7977 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7978 check whether we are replacing a 32-bit instruction. */
7979 if (tdep->thumb2_breakpoint != NULL)
7980 {
7981 gdb_byte buf[2];
7982
7983 if (target_read_memory (*pcptr, buf, 2) == 0)
7984 {
7985 unsigned short inst1;
7986
7987 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7988 if (thumb_insn_size (inst1) == 4)
7989 return ARM_BP_KIND_THUMB2;
7990 }
7991 }
7992
7993 return ARM_BP_KIND_THUMB;
7994 }
7995 else
7996 return ARM_BP_KIND_ARM;
7997
7998 }
7999
8000 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8001
8002 static const gdb_byte *
8003 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8004 {
8005 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8006
8007 switch (kind)
8008 {
8009 case ARM_BP_KIND_ARM:
8010 *size = tdep->arm_breakpoint_size;
8011 return tdep->arm_breakpoint;
8012 case ARM_BP_KIND_THUMB:
8013 *size = tdep->thumb_breakpoint_size;
8014 return tdep->thumb_breakpoint;
8015 case ARM_BP_KIND_THUMB2:
8016 *size = tdep->thumb2_breakpoint_size;
8017 return tdep->thumb2_breakpoint;
8018 default:
8019 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8020 }
8021 }
8022
8023 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8024
8025 static int
8026 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8027 struct regcache *regcache,
8028 CORE_ADDR *pcptr)
8029 {
8030 gdb_byte buf[4];
8031
8032 /* Check whether the memory pointed to by the PC is readable. */
8033 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8034 {
8035 struct arm_get_next_pcs next_pcs_ctx;
8036
8037 arm_get_next_pcs_ctor (&next_pcs_ctx,
8038 &arm_get_next_pcs_ops,
8039 gdbarch_byte_order (gdbarch),
8040 gdbarch_byte_order_for_code (gdbarch),
8041 0,
8042 regcache);
8043
8044 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8045
8046 /* If *PCPTR matches one of the addresses computed by software
8047 single-stepping, determine the Thumb mode from that destination
8048 address. */
8049 for (CORE_ADDR pc : next_pcs)
8050 {
8051 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8052 {
8053 if (IS_THUMB_ADDR (pc))
8054 {
8055 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8056 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8057 }
8058 else
8059 return ARM_BP_KIND_ARM;
8060 }
8061 }
8062 }
8063
8064 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8065 }
8066
8067 /* Extract from REGS, which contains the (raw) register state, a
8068 function return value of type TYPE, and copy that, in virtual
8069 format, into VALBUF. */
8070
8071 static void
8072 arm_extract_return_value (struct type *type, struct regcache *regs,
8073 gdb_byte *valbuf)
8074 {
8075 struct gdbarch *gdbarch = regs->arch ();
8076 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8077 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8078
8079 if (TYPE_CODE_FLT == type->code ())
8080 {
8081 switch (tdep->fp_model)
8082 {
8083 case ARM_FLOAT_FPA:
8084 {
8085 /* The value is in register F0 in internal format. We need to
8086 extract the raw value and then convert it to the desired
8087 internal type. */
8088 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8089
8090 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8091 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8092 valbuf, type);
8093 }
8094 break;
8095
8096 case ARM_FLOAT_SOFT_FPA:
8097 case ARM_FLOAT_SOFT_VFP:
8098 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8099 not using the VFP ABI code. */
8100 case ARM_FLOAT_VFP:
8101 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8102 if (TYPE_LENGTH (type) > 4)
8103 regs->cooked_read (ARM_A1_REGNUM + 1,
8104 valbuf + ARM_INT_REGISTER_SIZE);
8105 break;
8106
8107 default:
8108 internal_error (__FILE__, __LINE__,
8109 _("arm_extract_return_value: "
8110 "Floating point model not supported"));
8111 break;
8112 }
8113 }
8114 else if (type->code () == TYPE_CODE_INT
8115 || type->code () == TYPE_CODE_CHAR
8116 || type->code () == TYPE_CODE_BOOL
8117 || type->code () == TYPE_CODE_PTR
8118 || TYPE_IS_REFERENCE (type)
8119 || type->code () == TYPE_CODE_ENUM
8120 || is_fixed_point_type (type))
8121 {
8122 /* If the type is a plain integer, then the access is
8123 straightforward.  Otherwise we have to play around a bit
8124 more. */
8125 int len = TYPE_LENGTH (type);
8126 int regno = ARM_A1_REGNUM;
8127 ULONGEST tmp;
8128
8129 while (len > 0)
8130 {
8131 /* By using store_unsigned_integer we avoid having to do
8132 anything special for small big-endian values. */
8133 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8134 store_unsigned_integer (valbuf,
8135 (len > ARM_INT_REGISTER_SIZE
8136 ? ARM_INT_REGISTER_SIZE : len),
8137 byte_order, tmp);
8138 len -= ARM_INT_REGISTER_SIZE;
8139 valbuf += ARM_INT_REGISTER_SIZE;
8140 }
8141 }
8142 else
8143 {
8144 /* For a structure or union the behaviour is as if the value had
8145 been stored to word-aligned memory and then loaded into
8146 registers with 32-bit load instruction(s). */
8147 int len = TYPE_LENGTH (type);
8148 int regno = ARM_A1_REGNUM;
8149 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8150
8151 while (len > 0)
8152 {
8153 regs->cooked_read (regno++, tmpbuf);
8154 memcpy (valbuf, tmpbuf,
8155 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8156 len -= ARM_INT_REGISTER_SIZE;
8157 valbuf += ARM_INT_REGISTER_SIZE;
8158 }
8159 }
8160 }
8161
8162
8163 /* Will a function return an aggregate type in memory or in a
8164 register? Return 0 if an aggregate type can be returned in a
8165 register, 1 if it must be returned in memory. */
8166
8167 static int
8168 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8169 {
8170 enum type_code code;
8171
8172 type = check_typedef (type);
8173
8174 /* Simple, non-aggregate types (i.e. not including vectors and
8175 complex) are always returned in a register (or registers). */
8176 code = type->code ();
8177 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8178 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8179 return 0;
8180
8181 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8182 {
8183 /* Vector values should be returned using ARM registers if they
8184 are not over 16 bytes. */
8185 return (TYPE_LENGTH (type) > 16);
8186 }
8187
8188 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8189 if (tdep->arm_abi != ARM_ABI_APCS)
8190 {
8191 /* The AAPCS says all aggregates not larger than a word are returned
8192 in a register. */
8193 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE
8194 && language_pass_by_reference (type).trivially_copyable)
8195 return 0;
8196
8197 return 1;
8198 }
8199 else
8200 {
8201 int nRc;
8202
8203 /* All aggregate types that won't fit in a register must be returned
8204 in memory. */
8205 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE
8206 || !language_pass_by_reference (type).trivially_copyable)
8207 return 1;
8208
8209 /* In the ARM ABI, "integer" like aggregate types are returned in
8210 registers. For an aggregate type to be integer like, its size
8211 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8212 offset of each addressable subfield must be zero. Note that bit
8213 fields are not addressable, and all addressable subfields of
8214 unions always start at offset zero.
8215
8216 This function is based on the behaviour of GCC 2.95.1.
8217 See: gcc/arm.c: arm_return_in_memory() for details.
8218
8219 Note: All versions of GCC before GCC 2.95.2 do not set up the
8220 parameters correctly for a function returning the following
8221 structure: struct { float f;}; This should be returned in memory,
8222 not a register. Richard Earnshaw sent me a patch, but I do not
8223 know of any way to detect if a function like the above has been
8224 compiled with the correct calling convention. */
8225
8226 /* Assume all other aggregate types can be returned in a register.
8227 Run a check for structures, unions and arrays. */
8228 nRc = 0;
8229
8230 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8231 {
8232 int i;
8233 /* Need to check if this struct/union is "integer" like. For
8234 this to be true, its size must be less than or equal to
8235 ARM_INT_REGISTER_SIZE and the offset of each addressable
8236 subfield must be zero. Note that bit fields are not
8237 addressable, and unions always start at offset zero. If any
8238 of the subfields is a floating point type, the struct/union
8239 cannot be an integer type. */
8240
8241 /* For each field in the object, check:
8242 1) Is it FP? --> yes, nRc = 1;
8243 2) Is it addressable (bitpos != 0) and
8244 not packed (bitsize == 0)?
8245 --> yes, nRc = 1
8246 */
8247
8248 for (i = 0; i < type->num_fields (); i++)
8249 {
8250 enum type_code field_type_code;
8251
8252 field_type_code
8253 = check_typedef (type->field (i).type ())->code ();
8254
8255 /* Is it a floating point type field? */
8256 if (field_type_code == TYPE_CODE_FLT)
8257 {
8258 nRc = 1;
8259 break;
8260 }
8261
8262 /* If bitpos != 0, then we have to care about it. */
8263 if (type->field (i).loc_bitpos () != 0)
8264 {
8265 /* Bitfields are not addressable. If the field bitsize is
8266 zero, then the field is not packed. Hence it cannot be
8267 a bitfield or any other packed type. */
8268 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8269 {
8270 nRc = 1;
8271 break;
8272 }
8273 }
8274 }
8275 }
8276
8277 return nRc;
8278 }
8279 }
8280
8281 /* Write into appropriate registers a function return value of type
8282 TYPE, given in virtual format. */
8283
8284 static void
8285 arm_store_return_value (struct type *type, struct regcache *regs,
8286 const gdb_byte *valbuf)
8287 {
8288 struct gdbarch *gdbarch = regs->arch ();
8289 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8290
8291 if (type->code () == TYPE_CODE_FLT)
8292 {
8293 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8294 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8295
8296 switch (tdep->fp_model)
8297 {
8298 case ARM_FLOAT_FPA:
8299
8300 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8301 regs->cooked_write (ARM_F0_REGNUM, buf);
8302 break;
8303
8304 case ARM_FLOAT_SOFT_FPA:
8305 case ARM_FLOAT_SOFT_VFP:
8306 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8307 not using the VFP ABI code. */
8308 case ARM_FLOAT_VFP:
8309 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8310 if (TYPE_LENGTH (type) > 4)
8311 regs->cooked_write (ARM_A1_REGNUM + 1,
8312 valbuf + ARM_INT_REGISTER_SIZE);
8313 break;
8314
8315 default:
8316 internal_error (__FILE__, __LINE__,
8317 _("arm_store_return_value: Floating "
8318 "point model not supported"));
8319 break;
8320 }
8321 }
8322 else if (type->code () == TYPE_CODE_INT
8323 || type->code () == TYPE_CODE_CHAR
8324 || type->code () == TYPE_CODE_BOOL
8325 || type->code () == TYPE_CODE_PTR
8326 || TYPE_IS_REFERENCE (type)
8327 || type->code () == TYPE_CODE_ENUM)
8328 {
8329 if (TYPE_LENGTH (type) <= 4)
8330 {
8331 /* Values of one word or less are zero/sign-extended and
8332 returned in r0. */
8333 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8334 LONGEST val = unpack_long (type, valbuf);
8335
8336 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8337 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8338 }
8339 else
8340 {
8341 /* Integral values greater than one word are stored in consecutive
8342 registers starting with r0. This will always be a multiple of
8343 the register size. */
8344 int len = TYPE_LENGTH (type);
8345 int regno = ARM_A1_REGNUM;
8346
8347 while (len > 0)
8348 {
8349 regs->cooked_write (regno++, valbuf);
8350 len -= ARM_INT_REGISTER_SIZE;
8351 valbuf += ARM_INT_REGISTER_SIZE;
8352 }
8353 }
8354 }
8355 else
8356 {
8357 /* For a structure or union the behaviour is as if the value had
8358 been stored to word-aligned memory and then loaded into
8359 registers with 32-bit load instruction(s). */
8360 int len = TYPE_LENGTH (type);
8361 int regno = ARM_A1_REGNUM;
8362 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8363
8364 while (len > 0)
8365 {
8366 memcpy (tmpbuf, valbuf,
8367 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8368 regs->cooked_write (regno++, tmpbuf);
8369 len -= ARM_INT_REGISTER_SIZE;
8370 valbuf += ARM_INT_REGISTER_SIZE;
8371 }
8372 }
8373 }
8374
8375
8376 /* Handle function return values. */
8377
8378 static enum return_value_convention
8379 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8380 struct type *valtype, struct regcache *regcache,
8381 gdb_byte *readbuf, const gdb_byte *writebuf)
8382 {
8383 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8384 struct type *func_type = function ? value_type (function) : NULL;
8385 enum arm_vfp_cprc_base_type vfp_base_type;
8386 int vfp_base_count;
8387
8388 if (arm_vfp_abi_for_function (gdbarch, func_type)
8389 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8390 {
8391 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8392 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8393 int i;
8394 for (i = 0; i < vfp_base_count; i++)
8395 {
8396 if (reg_char == 'q')
8397 {
8398 if (writebuf)
8399 arm_neon_quad_write (gdbarch, regcache, i,
8400 writebuf + i * unit_length);
8401
8402 if (readbuf)
8403 arm_neon_quad_read (gdbarch, regcache, i,
8404 readbuf + i * unit_length);
8405 }
8406 else
8407 {
8408 char name_buf[4];
8409 int regnum;
8410
8411 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8412 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8413 strlen (name_buf));
8414 if (writebuf)
8415 regcache->cooked_write (regnum, writebuf + i * unit_length);
8416 if (readbuf)
8417 regcache->cooked_read (regnum, readbuf + i * unit_length);
8418 }
8419 }
8420 return RETURN_VALUE_REGISTER_CONVENTION;
8421 }
8422
8423 if (valtype->code () == TYPE_CODE_STRUCT
8424 || valtype->code () == TYPE_CODE_UNION
8425 || valtype->code () == TYPE_CODE_ARRAY)
8426 {
8427 /* From the AAPCS document:
8428
8429 Result return:
8430
8431 A Composite Type larger than 4 bytes, or whose size cannot be
8432 determined statically by both caller and callee, is stored in memory
8433 at an address passed as an extra argument when the function was
8434 called (Parameter Passing, rule A.4). The memory to be used for the
8435 result may be modified at any point during the function call.
8436
8437 Parameter Passing:
8438
8439 A.4: If the subroutine is a function that returns a result in memory,
8440 then the address for the result is placed in r0 and the NCRN is set
8441 to r1. */
8442 if (tdep->struct_return == pcc_struct_return
8443 || arm_return_in_memory (gdbarch, valtype))
8444 {
8445 if (readbuf)
8446 {
8447 CORE_ADDR addr;
8448
8449 regcache->cooked_read (ARM_A1_REGNUM, &addr);
8450 read_memory (addr, readbuf, TYPE_LENGTH (valtype));
8451 }
8452 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
8453 }
8454 }
8455 else if (valtype->code () == TYPE_CODE_COMPLEX)
8456 {
8457 if (arm_return_in_memory (gdbarch, valtype))
8458 return RETURN_VALUE_STRUCT_CONVENTION;
8459 }
8460
8461 if (writebuf)
8462 arm_store_return_value (valtype, regcache, writebuf);
8463
8464 if (readbuf)
8465 arm_extract_return_value (valtype, regcache, readbuf);
8466
8467 return RETURN_VALUE_REGISTER_CONVENTION;
8468 }
8469
8470
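/* Determine the longjmp target PC: read the jmp_buf address from r0 and
fetch the saved PC from the jb_pc slot.  Return non-zero on success. */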
8471 static int
8472 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8473 {
8474 struct gdbarch *gdbarch = get_frame_arch (frame);
8475 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8476 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8477 CORE_ADDR jb_addr;
8478 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8479
8480 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8481
8482 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8483 ARM_INT_REGISTER_SIZE))
8484 return 0;
8485
8486 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8487 return 1;
8488 }
8489 /* A call to cmse secure entry function "foo" at "a" is modified by
8490 GNU ld as "b".
8491 a) bl xxxx <foo>
8492
8493 <foo>
8494 xxxx:
8495
8496 b) bl yyyy <__acle_se_foo>
8497
8498 section .gnu.sgstubs:
8499 <foo>
8500 yyyy: sg // secure gateway
8501 b.w xxxx <__acle_se_foo> // original_branch_dest
8502
8503 <__acle_se_foo>
8504 xxxx:
8505
8506 When control is at "b", the PC contains "yyyy" (the sg address), which is a
8507 trampoline and does not exist in source code.  This function returns the
8508 target PC "xxxx".  For more details please refer to section 5.4
8509 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8510 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8511 document on www.developer.arm.com. */
8512
8513 static CORE_ADDR
8514 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8515 {
8516 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8517 char *target_name = (char *) alloca (target_len);
8518 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8519
8520 struct bound_minimal_symbol minsym
8521 = lookup_minimal_symbol (target_name, NULL, objfile);
8522
8523 if (minsym.minsym != nullptr)
8524 return BMSYMBOL_VALUE_ADDRESS (minsym);
8525
8526 return 0;
8527 }
8528
8529 /* Return true when SEC points to ".gnu.sgstubs" section. */
8530
8531 static bool
8532 arm_is_sgstubs_section (struct obj_section *sec)
8533 {
8534 return (sec != nullptr
8535 && sec->the_bfd_section != nullptr
8536 && sec->the_bfd_section->name != nullptr
8537 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8538 }
8539
8540 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8541 return the target PC. Otherwise return 0. */
8542
8543 CORE_ADDR
8544 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8545 {
8546 const char *name;
8547 int namelen;
8548 CORE_ADDR start_addr;
8549
8550 /* Find the starting address and name of the function containing the PC. */
8551 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8552 {
8553 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8554 check here. */
8555 start_addr = arm_skip_bx_reg (frame, pc);
8556 if (start_addr != 0)
8557 return start_addr;
8558
8559 return 0;
8560 }
8561
8562 /* If PC is in a Thumb call or return stub, return the address of the
8563 target PC, which is in a register. The thunk functions are called
8564 _call_via_xx, where xx is the register name.  The possible names
8565 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8566 functions, named __ARM_call_via_r[0-7]. */
8567 if (startswith (name, "_call_via_")
8568 || startswith (name, "__ARM_call_via_"))
8569 {
8570 /* Use the name suffix to determine which register contains the
8571 target PC. */
8572 static const char *table[15] =
8573 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8574 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8575 };
8576 int regno;
8577 int offset = strlen (name) - 2;
8578
8579 for (regno = 0; regno <= 14; regno++)
8580 if (strcmp (&name[offset], table[regno]) == 0)
8581 return get_frame_register_unsigned (frame, regno);
8582 }
8583
8584 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8585 non-interworking calls to foo. We could decode the stubs
8586 to find the target but it's easier to use the symbol table. */
8587 namelen = strlen (name);
8588 if (name[0] == '_' && name[1] == '_'
8589 && ((namelen > 2 + strlen ("_from_thumb")
8590 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8591 || (namelen > 2 + strlen ("_from_arm")
8592 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8593 {
8594 char *target_name;
8595 int target_len = namelen - 2;
8596 struct bound_minimal_symbol minsym;
8597 struct objfile *objfile;
8598 struct obj_section *sec;
8599
8600 if (name[namelen - 1] == 'b')
8601 target_len -= strlen ("_from_thumb");
8602 else
8603 target_len -= strlen ("_from_arm");
8604
8605 target_name = (char *) alloca (target_len + 1);
8606 memcpy (target_name, name + 2, target_len);
8607 target_name[target_len] = '\0';
8608
8609 sec = find_pc_section (pc);
8610 objfile = (sec == NULL) ? NULL : sec->objfile;
8611 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8612 if (minsym.minsym != NULL)
8613 return BMSYMBOL_VALUE_ADDRESS (minsym);
8614 else
8615 return 0;
8616 }
8617
8618 struct obj_section *section = find_pc_section (pc);
8619
8620 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8621 if (arm_is_sgstubs_section (section))
8622 return arm_skip_cmse_entry (pc, name, section->objfile);
8623
8624 return 0; /* not a stub */
8625 }
8626
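/* Rebuild the current architecture after a setting that affects it has been
changed. */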
8627 static void
8628 arm_update_current_architecture (void)
8629 {
8630 /* If the current architecture is not ARM, we have nothing to do. */
8631 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8632 return;
8633
8634 /* Update the architecture. */
8635 gdbarch_info info;
8636 if (!gdbarch_update_p (info))
8637 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8638 }
8639
8640 static void
8641 set_fp_model_sfunc (const char *args, int from_tty,
8642 struct cmd_list_element *c)
8643 {
8644 int fp_model;
8645
8646 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8647 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8648 {
8649 arm_fp_model = (enum arm_float_model) fp_model;
8650 break;
8651 }
8652
8653 if (fp_model == ARM_FLOAT_LAST)
8654 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8655 current_fp_model);
8656
8657 arm_update_current_architecture ();
8658 }
8659
8660 static void
8661 show_fp_model (struct ui_file *file, int from_tty,
8662 struct cmd_list_element *c, const char *value)
8663 {
8664 arm_gdbarch_tdep *tdep
8665 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
8666
8667 if (arm_fp_model == ARM_FLOAT_AUTO
8668 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8669 gdb_printf (file, _("\
8670 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8671 fp_model_strings[tdep->fp_model]);
8672 else
8673 gdb_printf (file, _("\
8674 The current ARM floating point model is \"%s\".\n"),
8675 fp_model_strings[arm_fp_model]);
8676 }
8677
8678 static void
8679 arm_set_abi (const char *args, int from_tty,
8680 struct cmd_list_element *c)
8681 {
8682 int arm_abi;
8683
8684 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8685 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8686 {
8687 arm_abi_global = (enum arm_abi_kind) arm_abi;
8688 break;
8689 }
8690
8691 if (arm_abi == ARM_ABI_LAST)
8692 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8693 arm_abi_string);
8694
8695 arm_update_current_architecture ();
8696 }
8697
8698 static void
8699 arm_show_abi (struct ui_file *file, int from_tty,
8700 struct cmd_list_element *c, const char *value)
8701 {
8702 arm_gdbarch_tdep *tdep
8703 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
8704
8705 if (arm_abi_global == ARM_ABI_AUTO
8706 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8707 gdb_printf (file, _("\
8708 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8709 arm_abi_strings[tdep->arm_abi]);
8710 else
8711 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
8712 arm_abi_string);
8713 }
8714
8715 static void
8716 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8717 struct cmd_list_element *c, const char *value)
8718 {
8719 gdb_printf (file,
8720 _("The current execution mode assumed "
8721 "(when symbols are unavailable) is \"%s\".\n"),
8722 arm_fallback_mode_string);
8723 }
8724
8725 static void
8726 arm_show_force_mode (struct ui_file *file, int from_tty,
8727 struct cmd_list_element *c, const char *value)
8728 {
8729 gdb_printf (file,
8730 _("The current execution mode assumed "
8731 "(even when symbols are available) is \"%s\".\n"),
8732 arm_force_mode_string);
8733 }
8734
8735 /* If the user changes the register disassembly style used for info
8736 register and other commands, we have to also switch the style used
8737 in opcodes for disassembly output. This function is run in the "set
8738 arm disassembly" command, and does that. */
8739
8740 static void
8741 set_disassembly_style_sfunc (const char *args, int from_tty,
8742 struct cmd_list_element *c)
8743 {
8744 /* Convert the short style name into the long style name (e.g. reg-names-*)
8745 before calling the generic set_disassembler_options() function. */
8746 std::string long_name = std::string ("reg-names-") + disassembly_style;
8747 set_disassembler_options (&long_name[0]);
8748 }
8749
8750 static void
8751 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8752 struct cmd_list_element *c, const char *value)
8753 {
8754 struct gdbarch *gdbarch = get_current_arch ();
8755 char *options = get_disassembler_options (gdbarch);
8756 const char *style = "";
8757 int len = 0;
8758 const char *opt;
8759
8760 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8761 if (startswith (opt, "reg-names-"))
8762 {
8763 style = &opt[strlen ("reg-names-")];
8764 len = strcspn (style, ",");
8765 }
8766
8767 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
8768 }
8769 \f
8770 /* Return the ARM register name corresponding to register I. */
8771 static const char *
8772 arm_register_name (struct gdbarch *gdbarch, int i)
8773 {
8774 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8775
8776 if (is_s_pseudo (gdbarch, i))
8777 {
8778 static const char *const s_pseudo_names[] = {
8779 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8780 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8781 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8782 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8783 };
8784
8785 return s_pseudo_names[i - tdep->s_pseudo_base];
8786 }
8787
8788 if (is_q_pseudo (gdbarch, i))
8789 {
8790 static const char *const q_pseudo_names[] = {
8791 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8792 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8793 };
8794
8795 return q_pseudo_names[i - tdep->q_pseudo_base];
8796 }
8797
8798 if (is_mve_pseudo (gdbarch, i))
8799 return "p0";
8800
8801 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
8802 if (is_pacbti_pseudo (gdbarch, i))
8803 return "";
8804
8805 if (i >= ARRAY_SIZE (arm_register_names))
8806 /* These registers are only supported on targets which supply
8807 an XML description. */
8808 return "";
8809
8810 /* Non-pseudo registers. */
8811 return arm_register_names[i];
8812 }
8813
8814 /* Test whether the coff symbol specific value corresponds to a Thumb
8815 function. */
8816
8817 static int
8818 coff_sym_is_thumb (int val)
8819 {
8820 return (val == C_THUMBEXT
8821 || val == C_THUMBSTAT
8822 || val == C_THUMBEXTFUNC
8823 || val == C_THUMBSTATFUNC
8824 || val == C_THUMBLABEL);
8825 }
8826
8827 /* arm_coff_make_msymbol_special()
8828 arm_elf_make_msymbol_special()
8829
8830 These functions test whether the COFF or ELF symbol corresponds to
8831 an address in thumb code, and set a "special" bit in a minimal
8832 symbol to indicate that it does. */
8833
8834 static void
8835 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8836 {
8837 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8838
8839 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8840 == ST_BRANCH_TO_THUMB)
8841 MSYMBOL_SET_SPECIAL (msym);
8842 }
8843
8844 static void
8845 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8846 {
8847 if (coff_sym_is_thumb (val))
8848 MSYMBOL_SET_SPECIAL (msym);
8849 }
8850
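/* Record the mapping symbol SYM ($a, $d or $t) of OBJFILE so that the
ARM/Thumb/data state of code regions can be looked up later. */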
8851 static void
8852 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8853 asymbol *sym)
8854 {
8855 const char *name = bfd_asymbol_name (sym);
8856 struct arm_per_bfd *data;
8857 struct arm_mapping_symbol new_map_sym;
8858
8859 gdb_assert (name[0] == '$');
8860 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8861 return;
8862
8863 data = arm_bfd_data_key.get (objfile->obfd);
8864 if (data == NULL)
8865 data = arm_bfd_data_key.emplace (objfile->obfd,
8866 objfile->obfd->section_count);
8867 arm_mapping_symbol_vec &map
8868 = data->section_maps[bfd_asymbol_section (sym)->index];
8869
8870 new_map_sym.value = sym->value;
8871 new_map_sym.type = name[1];
8872
8873 /* Insert at the end; the vector will be sorted on first use. */
8874 map.push_back (new_map_sym);
8875 }
8876
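/* Write PC to REGCACHE, and if 32-bit APCS is in use also update the CPSR T
bit to match the Thumb state of the new PC. */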
8877 static void
8878 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8879 {
8880 struct gdbarch *gdbarch = regcache->arch ();
8881 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8882
8883 /* If necessary, set the T bit. */
8884 if (arm_apcs_32)
8885 {
8886 ULONGEST val, t_bit;
8887 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8888 t_bit = arm_psr_thumb_bit (gdbarch);
8889 if (arm_pc_is_thumb (gdbarch, pc))
8890 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8891 val | t_bit);
8892 else
8893 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8894 val & ~t_bit);
8895 }
8896 }
8897
8898 /* Read the contents of a NEON quad register, by reading from two
8899 double registers. This is used to implement the quad pseudo
8900 registers, and for argument passing in case the quad registers are
8901 missing; vectors are passed in quad registers when using the VFP
8902 ABI, even if a NEON unit is not present. REGNUM is the index of
8903 the quad register, in [0, 15]. */
8904
8905 static enum register_status
8906 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8907 int regnum, gdb_byte *buf)
8908 {
8909 char name_buf[4];
8910 gdb_byte reg_buf[8];
8911 int offset, double_regnum;
8912 enum register_status status;
8913
8914 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8915 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8916 strlen (name_buf));
8917
8918 /* d0 is always the least significant half of q0. */
8919 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8920 offset = 8;
8921 else
8922 offset = 0;
8923
8924 status = regcache->raw_read (double_regnum, reg_buf);
8925 if (status != REG_VALID)
8926 return status;
8927 memcpy (buf + offset, reg_buf, 8);
8928
8929 offset = 8 - offset;
8930 status = regcache->raw_read (double_regnum + 1, reg_buf);
8931 if (status != REG_VALID)
8932 return status;
8933 memcpy (buf + offset, reg_buf, 8);
8934
8935 return REG_VALID;
8936 }
8937
8938 /* Read the contents of the MVE pseudo register REGNUM and store it
8939 in BUF. */
8940
8941 static enum register_status
8942 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8943 int regnum, gdb_byte *buf)
8944 {
8945 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8946
8947 /* P0 is the first 16 bits of VPR. */
8948 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
8949 }
8950
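/* Read the pseudo register REGNUM (single-precision S, quad Q, or MVE P0)
by reading the underlying raw register(s). */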
8951 static enum register_status
8952 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8953 int regnum, gdb_byte *buf)
8954 {
8955 const int num_regs = gdbarch_num_regs (gdbarch);
8956 char name_buf[4];
8957 gdb_byte reg_buf[8];
8958 int offset, double_regnum;
8959 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8960
8961 gdb_assert (regnum >= num_regs);
8962
8963 if (is_q_pseudo (gdbarch, regnum))
8964 {
8965 /* Quad-precision register. */
8966 return arm_neon_quad_read (gdbarch, regcache,
8967 regnum - tdep->q_pseudo_base, buf);
8968 }
8969 else if (is_mve_pseudo (gdbarch, regnum))
8970 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
8971 else
8972 {
8973 enum register_status status;
8974
8975 regnum -= tdep->s_pseudo_base;
8976 /* Single-precision register. */
8977 gdb_assert (regnum < 32);
8978
8979 /* s0 is always the least significant half of d0. */
8980 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8981 offset = (regnum & 1) ? 0 : 4;
8982 else
8983 offset = (regnum & 1) ? 4 : 0;
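/* E.g. on a little-endian target, s5 is the high half (offset 4) of d2.  */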
8984
8985 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8986 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8987 strlen (name_buf));
8988
8989 status = regcache->raw_read (double_regnum, reg_buf);
8990 if (status == REG_VALID)
8991 memcpy (buf, reg_buf + offset, 4);
8992 return status;
8993 }
8994 }
8995
8996 /* Store the contents of BUF to a NEON quad register, by writing to
8997 two double registers. This is used to implement the quad pseudo
8998 registers, and for argument passing in case the quad registers are
8999 missing; vectors are passed in quad registers when using the VFP
9000 ABI, even if a NEON unit is not present. REGNUM is the index
9001 of the quad register, in [0, 15]. */
9002
9003 static void
9004 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9005 int regnum, const gdb_byte *buf)
9006 {
9007 char name_buf[4];
9008 int offset, double_regnum;
9009
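/* As for reads, Qn overlays D(2n) and D(2n+1); write each 8-byte half to
   its double register.  */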
9010 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9011 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9012 strlen (name_buf));
9013
9014 /* d0 is always the least significant half of q0. */
9015 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9016 offset = 8;
9017 else
9018 offset = 0;
9019
9020 regcache->raw_write (double_regnum, buf + offset);
9021 offset = 8 - offset;
9022 regcache->raw_write (double_regnum + 1, buf + offset);
9023 }
9024
9025 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9026
9027 static void
9028 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9029 int regnum, const gdb_byte *buf)
9030 {
9031 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9032
9033 /* P0 is the first 16 bits of VPR. */
9034 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9035 }
9036
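/* Implement the gdbarch_pseudo_register_write hook; mirrors
   arm_pseudo_read.  */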
9037 static void
9038 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9039 int regnum, const gdb_byte *buf)
9040 {
9041 const int num_regs = gdbarch_num_regs (gdbarch);
9042 char name_buf[4];
9043 gdb_byte reg_buf[8];
9044 int offset, double_regnum;
9045 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9046
9047 gdb_assert (regnum >= num_regs);
9048
9049 if (is_q_pseudo (gdbarch, regnum))
9050 {
9051 /* Quad-precision register. */
9052 arm_neon_quad_write (gdbarch, regcache,
9053 regnum - tdep->q_pseudo_base, buf);
9054 }
9055 else if (is_mve_pseudo (gdbarch, regnum))
9056 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9057 else
9058 {
9059 regnum -= tdep->s_pseudo_base;
9060 /* Single-precision register. */
9061 gdb_assert (regnum < 32);
9062
9063 /* s0 is always the least significant half of d0. */
9064 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9065 offset = (regnum & 1) ? 0 : 4;
9066 else
9067 offset = (regnum & 1) ? 4 : 0;
9068
9069 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9070 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9071 strlen (name_buf));
9072
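/* Read-modify-write: fetch the containing D register, patch the selected
   4-byte half, then write it back.  */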
9073 regcache->raw_read (double_regnum, reg_buf);
9074 memcpy (reg_buf + offset, buf, 4);
9075 regcache->raw_write (double_regnum, reg_buf);
9076 }
9077 }
9078
9079 static struct value *
9080 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9081 {
9082 const int *reg_p = (const int *) baton;
9083 return value_of_register (*reg_p, frame);
9084 }
9085 \f
9086 static enum gdb_osabi
9087 arm_elf_osabi_sniffer (bfd *abfd)
9088 {
9089 unsigned int elfosabi;
9090 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9091
9092 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9093
9094 if (elfosabi == ELFOSABI_ARM)
9095 /* GNU tools use this value. Check note sections in this case,
9096 as well. */
9097 {
9098 for (asection *sect : gdb_bfd_sections (abfd))
9099 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9100 }
9101
9102 /* Anything else will be handled by the generic ELF sniffer. */
9103 return osabi;
9104 }
9105
9106 static int
9107 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9108 const struct reggroup *group)
9109 {
9110 /* The FPS register's type is INT, but it belongs to float_reggroup.  Besides
9111 this, the FPS register belongs to save_reggroup, restore_reggroup, and
9112 all_reggroup, of course.  */
9113 if (regnum == ARM_FPS_REGNUM)
9114 return (group == float_reggroup
9115 || group == save_reggroup
9116 || group == restore_reggroup
9117 || group == all_reggroup);
9118 else
9119 return default_register_reggroup_p (gdbarch, regnum, group);
9120 }
9121
9122 /* For backward-compatibility we allow two 'g' packet lengths with
9123 the remote protocol depending on whether FPA registers are
9124 supplied. M-profile targets do not have FPA registers, but some
9125 stubs already exist in the wild which use a 'g' packet which
9126 supplies them albeit with dummy values. The packet format which
9127 includes FPA registers should be considered deprecated for
9128 M-profile targets. */
9129
9130 static void
9131 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9132 {
9133 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9134
9135 if (tdep->is_m)
9136 {
9137 const target_desc *tdesc;
9138
9139 /* If we know from the executable this is an M-profile target,
9140 cater for remote targets whose register set layout is the
9141 same as the FPA layout. */
9142 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9143 register_remote_g_packet_guess (gdbarch,
9144 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9145 tdesc);
9146
9147 /* The regular M-profile layout. */
9148 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9149 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9150 tdesc);
9151
9152 /* M-profile plus M4F VFP. */
9153 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9154 register_remote_g_packet_guess (gdbarch,
9155 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9156 tdesc);
9157 /* M-profile plus MVE. */
9158 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9159 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9160 + ARM_VFP2_REGS_SIZE
9161 + ARM_INT_REGISTER_SIZE, tdesc);
9162 }
9163
9164 /* Otherwise we don't have a useful guess. */
9165 }
9166
9167 /* Implement the code_of_frame_writable gdbarch method. */
9168
9169 static int
9170 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
9171 {
9172 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9173
9174 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9175 {
9176 /* M-profile exception frames return to some magic PCs, which
9177 aren't writable at all.  */
9178 return 0;
9179 }
9180 else
9181 return 1;
9182 }
9183
9184 /* Implement gdbarch_gnu_triplet_regexp.  If the arch name is arm then allow it
9185 to be suffixed by a version (e.g. armv7hl).  */
9186
9187 static const char *
9188 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9189 {
9190 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9191 return "arm(v[^- ]*)?";
9192 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9193 }
9194
9195 /* Implement the "get_pc_address_flags" gdbarch method. */
9196
9197 static std::string
9198 arm_get_pc_address_flags (frame_info *frame, CORE_ADDR pc)
9199 {
9200 if (get_frame_pc_masked (frame))
9201 return "PAC";
9202
9203 return "";
9204 }
9205
9206 /* Initialize the current architecture based on INFO. If possible,
9207 re-use an architecture from ARCHES, which is a list of
9208 architectures already created during this debugging session.
9209
9210 Called e.g. at program startup, when reading a core file, and when
9211 reading a binary file. */
9212
9213 static struct gdbarch *
9214 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9215 {
9216 struct gdbarch *gdbarch;
9217 struct gdbarch_list *best_arch;
9218 enum arm_abi_kind arm_abi = arm_abi_global;
9219 enum arm_float_model fp_model = arm_fp_model;
9220 tdesc_arch_data_up tdesc_data;
9221 int i;
9222 bool is_m = false;
9223 int vfp_register_count = 0;
9224 bool have_s_pseudos = false, have_q_pseudos = false;
9225 bool have_wmmx_registers = false;
9226 bool have_neon = false;
9227 bool have_fpa_registers = true;
9228 const struct target_desc *tdesc = info.target_desc;
9229 bool have_vfp = false;
9230 bool have_mve = false;
9231 bool have_pacbti = false;
9232 int mve_vpr_regnum = -1;
9233 int register_count = ARM_NUM_REGS;
9234
9235 /* If we have an object to base this architecture on, try to determine
9236 its ABI. */
9237
9238 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9239 {
9240 int ei_osabi, e_flags;
9241
9242 switch (bfd_get_flavour (info.abfd))
9243 {
9244 case bfd_target_coff_flavour:
9245 /* Assume it's an old APCS-style ABI. */
9246 /* XXX WinCE? */
9247 arm_abi = ARM_ABI_APCS;
9248 break;
9249
9250 case bfd_target_elf_flavour:
9251 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9252 e_flags = elf_elfheader (info.abfd)->e_flags;
9253
9254 if (ei_osabi == ELFOSABI_ARM)
9255 {
9256 /* GNU tools used to use this value, but do not for EABI
9257 objects. There's nowhere to tag an EABI version
9258 anyway, so assume APCS. */
9259 arm_abi = ARM_ABI_APCS;
9260 }
9261 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9262 {
9263 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9264
9265 switch (eabi_ver)
9266 {
9267 case EF_ARM_EABI_UNKNOWN:
9268 /* Assume GNU tools. */
9269 arm_abi = ARM_ABI_APCS;
9270 break;
9271
9272 case EF_ARM_EABI_VER4:
9273 case EF_ARM_EABI_VER5:
9274 arm_abi = ARM_ABI_AAPCS;
9275 /* EABI binaries default to VFP float ordering.
9276 They may also contain build attributes that can
9277 be used to identify if the VFP argument-passing
9278 ABI is in use. */
9279 if (fp_model == ARM_FLOAT_AUTO)
9280 {
9281 #ifdef HAVE_ELF
9282 switch (bfd_elf_get_obj_attr_int (info.abfd,
9283 OBJ_ATTR_PROC,
9284 Tag_ABI_VFP_args))
9285 {
9286 case AEABI_VFP_args_base:
9287 /* "The user intended FP parameter/result
9288 passing to conform to AAPCS, base
9289 variant". */
9290 fp_model = ARM_FLOAT_SOFT_VFP;
9291 break;
9292 case AEABI_VFP_args_vfp:
9293 /* "The user intended FP parameter/result
9294 passing to conform to AAPCS, VFP
9295 variant". */
9296 fp_model = ARM_FLOAT_VFP;
9297 break;
9298 case AEABI_VFP_args_toolchain:
9299 /* "The user intended FP parameter/result
9300 passing to conform to tool chain-specific
9301 conventions" - we don't know any such
9302 conventions, so leave it as "auto". */
9303 break;
9304 case AEABI_VFP_args_compatible:
9305 /* "Code is compatible with both the base
9306 and VFP variants; the user did not permit
9307 non-variadic functions to pass FP
9308 parameters/results" - leave it as
9309 "auto". */
9310 break;
9311 default:
9312 /* Attribute value not mentioned in the
9313 November 2012 ABI, so leave it as
9314 "auto". */
9315 break;
9316 }
9317 #else
9318 fp_model = ARM_FLOAT_SOFT_VFP;
9319 #endif
9320 }
9321 break;
9322
9323 default:
9324 /* Leave it as "auto". */
9325 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9326 break;
9327 }
9328
9329 #ifdef HAVE_ELF
9330 /* Detect M-profile programs. This only works if the
9331 executable file includes build attributes; GCC does
9332 copy them to the executable, but e.g. RealView does
9333 not. */
9334 int attr_arch
9335 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9336 Tag_CPU_arch);
9337 int attr_profile
9338 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9339 Tag_CPU_arch_profile);
9340
9341 /* GCC specifies the profile for v6-M; RealView only
9342 specifies the profile for architectures starting with
9343 V7 (as opposed to architectures with a tag
9344 numerically greater than TAG_CPU_ARCH_V7). */
9345 if (!tdesc_has_registers (tdesc)
9346 && (attr_arch == TAG_CPU_ARCH_V6_M
9347 || attr_arch == TAG_CPU_ARCH_V6S_M
9348 || attr_arch == TAG_CPU_ARCH_V7E_M
9349 || attr_arch == TAG_CPU_ARCH_V8M_BASE
9350 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
9351 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
9352 || attr_profile == 'M'))
9353 is_m = true;
9354
9355 /* Look for attributes that indicate support for ARMv8.1-m
9356 PACBTI. */
9357 if (!tdesc_has_registers (tdesc) && is_m)
9358 {
9359 int attr_pac_extension
9360 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9361 Tag_PAC_extension);
9362
9363 int attr_bti_extension
9364 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9365 Tag_BTI_extension);
9366
9367 int attr_pacret_use
9368 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9369 Tag_PACRET_use);
9370
9371 int attr_bti_use
9372 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9373 Tag_BTI_use);
9374
9375 if (attr_pac_extension != 0 || attr_bti_extension != 0
9376 || attr_pacret_use != 0 || attr_bti_use != 0)
9377 have_pacbti = true;
9378 }
9379 #endif
9380 }
9381
9382 if (fp_model == ARM_FLOAT_AUTO)
9383 {
9384 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9385 {
9386 case 0:
9387 /* Leave it as "auto". Strictly speaking this case
9388 means FPA, but almost nobody uses that now, and
9389 many toolchains fail to set the appropriate bits
9390 for the floating-point model they use. */
9391 break;
9392 case EF_ARM_SOFT_FLOAT:
9393 fp_model = ARM_FLOAT_SOFT_FPA;
9394 break;
9395 case EF_ARM_VFP_FLOAT:
9396 fp_model = ARM_FLOAT_VFP;
9397 break;
9398 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9399 fp_model = ARM_FLOAT_SOFT_VFP;
9400 break;
9401 }
9402 }
9403
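/* BE8 binaries have big-endian data but a little-endian instruction
   stream, so disassemble code as little-endian.  */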
9404 if (e_flags & EF_ARM_BE8)
9405 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9406
9407 break;
9408
9409 default:
9410 /* Leave it as "auto". */
9411 break;
9412 }
9413 }
9414
9415 /* Check any target description for validity. */
9416 if (tdesc_has_registers (tdesc))
9417 {
9418 /* For most registers we require GDB's default names; but also allow
9419 the numeric names for sp / lr / pc, as a convenience. */
9420 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9421 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9422 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9423
9424 const struct tdesc_feature *feature;
9425 int valid_p;
9426
9427 feature = tdesc_find_feature (tdesc,
9428 "org.gnu.gdb.arm.core");
9429 if (feature == NULL)
9430 {
9431 feature = tdesc_find_feature (tdesc,
9432 "org.gnu.gdb.arm.m-profile");
9433 if (feature == NULL)
9434 return NULL;
9435 else
9436 is_m = true;
9437 }
9438
9439 tdesc_data = tdesc_data_alloc ();
9440
9441 valid_p = 1;
9442 for (i = 0; i < ARM_SP_REGNUM; i++)
9443 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9444 arm_register_names[i]);
9445 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9446 ARM_SP_REGNUM,
9447 arm_sp_names);
9448 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9449 ARM_LR_REGNUM,
9450 arm_lr_names);
9451 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9452 ARM_PC_REGNUM,
9453 arm_pc_names);
9454 if (is_m)
9455 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9456 ARM_PS_REGNUM, "xpsr");
9457 else
9458 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9459 ARM_PS_REGNUM, "cpsr");
9460
9461 if (!valid_p)
9462 return NULL;
9463
9464 feature = tdesc_find_feature (tdesc,
9465 "org.gnu.gdb.arm.fpa");
9466 if (feature != NULL)
9467 {
9468 valid_p = 1;
9469 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9470 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9471 arm_register_names[i]);
9472 if (!valid_p)
9473 return NULL;
9474 }
9475 else
9476 have_fpa_registers = false;
9477
9478 feature = tdesc_find_feature (tdesc,
9479 "org.gnu.gdb.xscale.iwmmxt");
9480 if (feature != NULL)
9481 {
9482 static const char *const iwmmxt_names[] = {
9483 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9484 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9485 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9486 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9487 };
9488
9489 valid_p = 1;
9490 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9491 valid_p
9492 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9493 iwmmxt_names[i - ARM_WR0_REGNUM]);
9494
9495 /* Check for the control registers, but do not fail if they
9496 are missing. */
9497 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9498 tdesc_numbered_register (feature, tdesc_data.get (), i,
9499 iwmmxt_names[i - ARM_WR0_REGNUM]);
9500
9501 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9502 valid_p
9503 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9504 iwmmxt_names[i - ARM_WR0_REGNUM]);
9505
9506 if (!valid_p)
9507 return NULL;
9508
9509 have_wmmx_registers = true;
9510 }
9511
9512 /* If we have a VFP unit, check whether the single precision registers
9513 are present. If not, then we will synthesize them as pseudo
9514 registers. */
9515 feature = tdesc_find_feature (tdesc,
9516 "org.gnu.gdb.arm.vfp");
9517 if (feature != NULL)
9518 {
9519 static const char *const vfp_double_names[] = {
9520 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9521 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9522 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9523 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9524 };
9525
9526 /* Require the double precision registers. There must be either
9527 16 or 32. */
9528 valid_p = 1;
9529 for (i = 0; i < 32; i++)
9530 {
9531 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9532 ARM_D0_REGNUM + i,
9533 vfp_double_names[i]);
9534 if (!valid_p)
9535 break;
9536 }
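/* A description that only supplies d0-d15 fails the loop exactly at
   i == 16; accept that as the valid 16-register VFP variant.  */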
9537 if (!valid_p && i == 16)
9538 valid_p = 1;
9539
9540 /* Also require FPSCR. */
9541 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9542 ARM_FPSCR_REGNUM, "fpscr");
9543 if (!valid_p)
9544 return NULL;
9545
9546 have_vfp = true;
9547
9548 if (tdesc_unnumbered_register (feature, "s0") == 0)
9549 have_s_pseudos = true;
9550
9551 vfp_register_count = i;
9552
9553 /* If we have VFP, also check for NEON. The architecture allows
9554 NEON without VFP (integer vector operations only), but GDB
9555 does not support that. */
9556 feature = tdesc_find_feature (tdesc,
9557 "org.gnu.gdb.arm.neon");
9558 if (feature != NULL)
9559 {
9560 /* NEON requires 32 double-precision registers. */
9561 if (i != 32)
9562 return NULL;
9563
9564 /* If there are quad registers defined by the stub, use
9565 their type; otherwise (normally) provide them with
9566 the default type. */
9567 if (tdesc_unnumbered_register (feature, "q0") == 0)
9568 have_q_pseudos = true;
9569 }
9570 }
9571
9572 /* Check for MVE after all the checks for GPRs, VFP and NEON.
9573 MVE (Helium) is an M-profile extension. */
9574 if (is_m)
9575 {
9576 /* Do we have the MVE feature? */
9577 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.m-profile-mve");
9578
9579 if (feature != nullptr)
9580 {
9581 /* If we have MVE, we must always have the VPR register. */
9582 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9583 register_count, "vpr");
9584 if (!valid_p)
9585 {
9586 warning (_("MVE feature is missing required register vpr."));
9587 return nullptr;
9588 }
9589
9590 have_mve = true;
9591 mve_vpr_regnum = register_count;
9592 register_count++;
9593
9594 /* We can't have Q pseudo registers available here, as that
9595 would mean we have NEON features, and that is only available
9596 on A and R profiles. */
9597 gdb_assert (!have_q_pseudos);
9598
9599 /* Given we have an M-profile target description, if MVE is
9600 enabled and there are VFP registers, we should have Q
9601 pseudo registers (Q0 ~ Q7). */
9602 if (have_vfp)
9603 have_q_pseudos = true;
9604 }
9605
9606 /* Do we have the ARMv8.1-m PACBTI feature? */
9607 feature = tdesc_find_feature (tdesc,
9608 "org.gnu.gdb.arm.m-profile-pacbti");
9609 if (feature != nullptr)
9610 {
9611 /* By advertising this feature, the target acknowledges the
9612 presence of the ARMv8.1-m PACBTI extensions.
9613
9614 We don't care for any particular registers in this group, so
9615 the target is free to include whatever it deems appropriate.
9616
9617 The expectation is for this feature to include the PAC
9618 keys. */
9619 have_pacbti = true;
9620 }
9621 }
9622 }
9623
9624 /* If there is already a candidate, use it. */
9625 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9626 best_arch != NULL;
9627 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9628 {
9629 arm_gdbarch_tdep *tdep
9630 = (arm_gdbarch_tdep *) gdbarch_tdep (best_arch->gdbarch);
9631
9632 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
9633 continue;
9634
9635 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
9636 continue;
9637
9638 /* There are various other properties in tdep that we do not
9639 need to check here: those derived from a target description,
9640 since gdbarches with a different target description are
9641 automatically disqualified. */
9642
9643 /* Do check is_m, though, since it might come from the binary. */
9644 if (is_m != tdep->is_m)
9645 continue;
9646
9647 /* Also check for ARMv8.1-m PACBTI support, since it might come from
9648 the binary. */
9649 if (have_pacbti != tdep->have_pacbti)
9650 continue;
9651
9652 /* Found a match. */
9653 break;
9654 }
9655
9656 if (best_arch != NULL)
9657 return best_arch->gdbarch;
9658
9659 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
9660 gdbarch = gdbarch_alloc (&info, tdep);
9661
9662 /* Record additional information about the architecture we are defining.
9663 These are gdbarch discriminators, like the OSABI. */
9664 tdep->arm_abi = arm_abi;
9665 tdep->fp_model = fp_model;
9666 tdep->is_m = is_m;
9667 tdep->have_fpa_registers = have_fpa_registers;
9668 tdep->have_wmmx_registers = have_wmmx_registers;
9669 gdb_assert (vfp_register_count == 0
9670 || vfp_register_count == 16
9671 || vfp_register_count == 32);
9672 tdep->vfp_register_count = vfp_register_count;
9673 tdep->have_s_pseudos = have_s_pseudos;
9674 tdep->have_q_pseudos = have_q_pseudos;
9675 tdep->have_neon = have_neon;
9676
9677 /* Adjust the MVE feature settings. */
9678 if (have_mve)
9679 {
9680 tdep->have_mve = true;
9681 tdep->mve_vpr_regnum = mve_vpr_regnum;
9682 }
9683
9684 /* Adjust the PACBTI feature settings. */
9685 tdep->have_pacbti = have_pacbti;
9686
9687 arm_register_g_packet_guesses (gdbarch);
9688
9689 /* Breakpoints. */
9690 switch (info.byte_order_for_code)
9691 {
9692 case BFD_ENDIAN_BIG:
9693 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9694 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9695 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9696 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9697
9698 break;
9699
9700 case BFD_ENDIAN_LITTLE:
9701 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9702 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9703 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9704 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9705
9706 break;
9707
9708 default:
9709 internal_error (__FILE__, __LINE__,
9710 _("arm_gdbarch_init: bad byte order for float format"));
9711 }
9712
9713 /* On ARM targets char defaults to unsigned. */
9714 set_gdbarch_char_signed (gdbarch, 0);
9715
9716 /* wchar_t is unsigned under the AAPCS. */
9717 if (tdep->arm_abi == ARM_ABI_AAPCS)
9718 set_gdbarch_wchar_signed (gdbarch, 0);
9719 else
9720 set_gdbarch_wchar_signed (gdbarch, 1);
9721
9722 /* Compute type alignment. */
9723 set_gdbarch_type_align (gdbarch, arm_type_align);
9724
9725 /* Note: for displaced stepping, this includes the breakpoint, and one word
9726 of additional scratch space.  This setting isn't used for anything besides
9727 displaced stepping at present. */
9728 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9729
9730 /* This should be low enough for everything. */
9731 tdep->lowest_pc = 0x20;
9732 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9733
9734 /* The default, for both APCS and AAPCS, is to return small
9735 structures in registers. */
9736 tdep->struct_return = reg_struct_return;
9737
9738 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9739 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9740
9741 if (is_m)
9742 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9743
9744 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9745
9746 frame_base_set_default (gdbarch, &arm_normal_base);
9747
9748 /* Address manipulation. */
9749 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9750
9751 /* Advance PC across function entry code. */
9752 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9753
9754 /* Detect whether PC is at a point where the stack has been destroyed. */
9755 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9756
9757 /* Skip trampolines. */
9758 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9759
9760 /* The stack grows downward. */
9761 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9762
9763 /* Breakpoint manipulation. */
9764 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9765 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9766 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9767 arm_breakpoint_kind_from_current_state);
9768
9769 /* Information about registers, etc. */
9770 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9771 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9772 set_gdbarch_num_regs (gdbarch, register_count);
9773 set_gdbarch_register_type (gdbarch, arm_register_type);
9774 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9775
9776 /* This "info float" is FPA-specific. Use the generic version if we
9777 do not have FPA. */
9778 if (tdep->have_fpa_registers)
9779 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9780
9781 /* Internal <-> external register number maps. */
9782 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9783 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9784
9785 set_gdbarch_register_name (gdbarch, arm_register_name);
9786
9787 /* Returning results. */
9788 set_gdbarch_return_value (gdbarch, arm_return_value);
9789
9790 /* Disassembly. */
9791 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9792
9793 /* Minsymbol frobbing. */
9794 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9795 set_gdbarch_coff_make_msymbol_special (gdbarch,
9796 arm_coff_make_msymbol_special);
9797 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9798
9799 /* Thumb-2 IT block support. */
9800 set_gdbarch_adjust_breakpoint_address (gdbarch,
9801 arm_adjust_breakpoint_address);
9802
9803 /* Virtual tables. */
9804 set_gdbarch_vbit_in_delta (gdbarch, 1);
9805
9806 /* Hook in the ABI-specific overrides, if they have been registered. */
9807 gdbarch_init_osabi (info, gdbarch);
9808
9809 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9810
9811 /* Add some default predicates. */
9812 if (is_m)
9813 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9814 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9815 dwarf2_append_unwinders (gdbarch);
9816 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9817 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9818 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9819
9820 /* Now we have tuned the configuration, set a few final things,
9821 based on what the OS ABI has told us. */
9822
9823 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9824 binaries are always marked. */
9825 if (tdep->arm_abi == ARM_ABI_AUTO)
9826 tdep->arm_abi = ARM_ABI_APCS;
9827
9828 /* Watchpoints are not steppable. */
9829 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9830
9831 /* We used to default to FPA for generic ARM, but almost nobody
9832 uses that now, and we now provide a way for the user to force
9833 the model. So default to the most useful variant. */
9834 if (tdep->fp_model == ARM_FLOAT_AUTO)
9835 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9836
9837 if (tdep->jb_pc >= 0)
9838 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9839
9840 /* Floating point sizes and format. */
9841 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9842 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9843 {
9844 set_gdbarch_double_format
9845 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9846 set_gdbarch_long_double_format
9847 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9848 }
9849 else
9850 {
9851 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9852 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9853 }
9854
9855 /* Hook used to decorate frames with signed return addresses, only available
9856 for ARMv8.1-m PACBTI. */
9857 if (is_m && have_pacbti)
9858 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
9859
9860 if (tdesc_data != nullptr)
9861 {
9862 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9863
9864 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9865 register_count = gdbarch_num_regs (gdbarch);
9866
9867 /* Override tdesc_register_type to adjust the types of VFP
9868 registers for NEON. */
9869 set_gdbarch_register_type (gdbarch, arm_register_type);
9870 }
9871
9872 /* Initialize the pseudo register data. */
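/* Pseudo registers are numbered immediately after the raw registers, in
   the order S0-S31, the Q pseudos, the MVE P0 pseudo, and finally the
   PACBTI pseudo (each group only when present).  */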
9873 int num_pseudos = 0;
9874 if (tdep->have_s_pseudos)
9875 {
9876 /* VFP single precision pseudo registers (S0~S31). */
9877 tdep->s_pseudo_base = register_count;
9878 tdep->s_pseudo_count = 32;
9879 num_pseudos += tdep->s_pseudo_count;
9880
9881 if (tdep->have_q_pseudos)
9882 {
9883 /* NEON quad precision pseudo registers (Q0~Q15). */
9884 tdep->q_pseudo_base = register_count + num_pseudos;
9885
9886 if (have_neon)
9887 tdep->q_pseudo_count = 16;
9888 else if (have_mve)
9889 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
9890
9891 num_pseudos += tdep->q_pseudo_count;
9892 }
9893 }
9894
9895 /* Do we have any MVE pseudo registers? */
9896 if (have_mve)
9897 {
9898 tdep->mve_pseudo_base = register_count + num_pseudos;
9899 tdep->mve_pseudo_count = 1;
9900 num_pseudos += tdep->mve_pseudo_count;
9901 }
9902
9903 /* Do we have any ARMv8.1-m PACBTI pseudo registers?  */
9904 if (have_pacbti)
9905 {
9906 tdep->pacbti_pseudo_base = register_count + num_pseudos;
9907 tdep->pacbti_pseudo_count = 1;
9908 num_pseudos += tdep->pacbti_pseudo_count;
9909 }
9910
9911 /* Set some pseudo register hooks, if we have pseudo registers. */
9912 if (tdep->have_s_pseudos || have_mve || have_pacbti)
9913 {
9914 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9915 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9916 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9917 }
9918
9919 /* Add standard register aliases. We add aliases even for those
9920 names which are used by the current architecture - it's simpler,
9921 and does no harm, since nothing ever lists user registers. */
9922 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9923 user_reg_add (gdbarch, arm_register_aliases[i].name,
9924 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9925
9926 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9927 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9928
9929 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9930
9931 return gdbarch;
9932 }
9933
9934 static void
9935 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9936 {
9937 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9938
9939 if (tdep == NULL)
9940 return;
9941
9942 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
9943 (int) tdep->fp_model);
9944 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9945 (int) tdep->have_fpa_registers);
9946 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9947 (int) tdep->have_wmmx_registers);
9948 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9949 (int) tdep->vfp_register_count);
9950 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
9951 tdep->have_s_pseudos? "true" : "false");
9952 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
9953 (int) tdep->s_pseudo_base);
9954 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
9955 (int) tdep->s_pseudo_count);
9956 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
9957 tdep->have_q_pseudos? "true" : "false");
9958 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
9959 (int) tdep->q_pseudo_base);
9960 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
9961 (int) tdep->q_pseudo_count);
9962 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
9963 (int) tdep->have_neon);
9964 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
9965 tdep->have_mve? "yes" : "no");
9966 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
9967 tdep->mve_vpr_regnum);
9968 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
9969 tdep->mve_pseudo_base);
9970 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
9971 tdep->mve_pseudo_count);
9972 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9973 (unsigned long) tdep->lowest_pc);
9974 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
9975 tdep->have_pacbti? "yes" : "no");
9976 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
9977 tdep->pacbti_pseudo_base);
9978 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
9979 tdep->pacbti_pseudo_count);
9980 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
9981 tdep->is_m? "yes" : "no");
9982 }
9983
9984 #if GDB_SELF_TEST
9985 namespace selftests
9986 {
9987 static void arm_record_test (void);
9988 static void arm_analyze_prologue_test ();
9989 }
9990 #endif
9991
9992 void _initialize_arm_tdep ();
9993 void
9994 _initialize_arm_tdep ()
9995 {
9996 long length;
9997 int i, j;
9998 char regdesc[1024], *rdptr = regdesc;
9999 size_t rest = sizeof (regdesc);
10000
10001 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10002
10003 /* Add ourselves to objfile event chain. */
10004 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10005
10006 /* Register an ELF OS ABI sniffer for ARM binaries. */
10007 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10008 bfd_target_elf_flavour,
10009 arm_elf_osabi_sniffer);
10010
10011 /* Add root prefix command for all "set arm"/"show arm" commands. */
10012 add_setshow_prefix_cmd ("arm", no_class,
10013 _("Various ARM-specific commands."),
10014 _("Various ARM-specific commands."),
10015 &setarmcmdlist, &showarmcmdlist,
10016 &setlist, &showlist);
10017
10018 arm_disassembler_options = xstrdup ("reg-names-std");
10019 const disasm_options_t *disasm_options
10020 = &disassembler_options_arm ()->options;
10021 int num_disassembly_styles = 0;
10022 for (i = 0; disasm_options->name[i] != NULL; i++)
10023 if (startswith (disasm_options->name[i], "reg-names-"))
10024 num_disassembly_styles++;
10025
10026 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
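/* Each "reg-names-<style>" disassembler option contributes a "<style>"
   enum value, and its description is collected into REGDESC for the help
   text.  */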
10027 valid_disassembly_styles = XNEWVEC (const char *,
10028 num_disassembly_styles + 1);
10029 for (i = j = 0; disasm_options->name[i] != NULL; i++)
10030 if (startswith (disasm_options->name[i], "reg-names-"))
10031 {
10032 size_t offset = strlen ("reg-names-");
10033 const char *style = disasm_options->name[i];
10034 valid_disassembly_styles[j++] = &style[offset];
10035 if (strcmp (&style[offset], "std") == 0)
10036 disassembly_style = &style[offset];
10037 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
10038 disasm_options->description[i]);
10039 rdptr += length;
10040 rest -= length;
10041 }
10042 /* Mark the end of valid options. */
10043 valid_disassembly_styles[num_disassembly_styles] = NULL;
10044
10045 /* Create the help text. */
10046 std::string helptext = string_printf ("%s%s%s",
10047 _("The valid values are:\n"),
10048 regdesc,
10049 _("The default is \"std\"."));
10050
10051 add_setshow_enum_cmd("disassembler", no_class,
10052 valid_disassembly_styles, &disassembly_style,
10053 _("Set the disassembly style."),
10054 _("Show the disassembly style."),
10055 helptext.c_str (),
10056 set_disassembly_style_sfunc,
10057 show_disassembly_style_sfunc,
10058 &setarmcmdlist, &showarmcmdlist);
10059
10060 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10061 _("Set usage of ARM 32-bit mode."),
10062 _("Show usage of ARM 32-bit mode."),
10063 _("When off, a 26-bit PC will be used."),
10064 NULL,
10065 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10066 mode is %s. */
10067 &setarmcmdlist, &showarmcmdlist);
10068
10069 /* Add a command to allow the user to force the FPU model. */
10070 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10071 _("Set the floating point type."),
10072 _("Show the floating point type."),
10073 _("auto - Determine the FP typefrom the OS-ABI.\n\
10074 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10075 fpa - FPA co-processor (GCC compiled).\n\
10076 softvfp - Software FP with pure-endian doubles.\n\
10077 vfp - VFP co-processor."),
10078 set_fp_model_sfunc, show_fp_model,
10079 &setarmcmdlist, &showarmcmdlist);
10080
10081 /* Add a command to allow the user to force the ABI. */
10082 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10083 _("Set the ABI."),
10084 _("Show the ABI."),
10085 NULL, arm_set_abi, arm_show_abi,
10086 &setarmcmdlist, &showarmcmdlist);
10087
10088 /* Add two commands to allow the user to force the assumed
10089 execution mode. */
10090 add_setshow_enum_cmd ("fallback-mode", class_support,
10091 arm_mode_strings, &arm_fallback_mode_string,
10092 _("Set the mode assumed when symbols are unavailable."),
10093 _("Show the mode assumed when symbols are unavailable."),
10094 NULL, NULL, arm_show_fallback_mode,
10095 &setarmcmdlist, &showarmcmdlist);
10096 add_setshow_enum_cmd ("force-mode", class_support,
10097 arm_mode_strings, &arm_force_mode_string,
10098 _("Set the mode assumed even when symbols are available."),
10099 _("Show the mode assumed even when symbols are available."),
10100 NULL, NULL, arm_show_force_mode,
10101 &setarmcmdlist, &showarmcmdlist);
10102
10103 /* Debugging flag. */
10104 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10105 _("Set ARM debugging."),
10106 _("Show ARM debugging."),
10107 _("When on, arm-specific debugging is enabled."),
10108 NULL,
10109 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10110 &setdebuglist, &showdebuglist);
10111
10112 #if GDB_SELF_TEST
10113 selftests::register_test ("arm-record", selftests::arm_record_test);
10114 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
10115 #endif
10116
10117 }
10118
10119 /* ARM-reversible process record data structures. */
10120
10121 #define ARM_INSN_SIZE_BYTES 4
10122 #define THUMB_INSN_SIZE_BYTES 2
10123 #define THUMB2_INSN_SIZE_BYTES 4
10124
10125
10126 /* Position of the bit within a 32-bit ARM instruction
10127 that defines whether the instruction is a load or store. */
10128 #define INSN_S_L_BIT_NUM 20
10129
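/* Copy the scratch record buffers into freshly allocated arrays attached
   to the instruction record; both macros are no-ops when LENGTH is zero.  */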
10130 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10131 do \
10132 { \
10133 unsigned int reg_len = LENGTH; \
10134 if (reg_len) \
10135 { \
10136 REGS = XNEWVEC (uint32_t, reg_len); \
10137 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10138 } \
10139 } \
10140 while (0)
10141
10142 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10143 do \
10144 { \
10145 unsigned int mem_len = LENGTH; \
10146 if (mem_len) \
10147 { \
10148 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10149 memcpy(&MEMS->len, &RECORD_BUF[0], \
10150 sizeof(struct arm_mem_r) * LENGTH); \
10151 } \
10152 } \
10153 while (0)
10154
10155 /* Checks whether the insn has already been recorded or is yet to be decoded (boolean expression).  */
10156 #define INSN_RECORDED(ARM_RECORD) \
10157 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10158
10159 /* ARM memory record structure. */
10160 struct arm_mem_r
10161 {
10162 uint32_t len; /* Record length. */
10163 uint32_t addr; /* Memory address. */
10164 };
10165
10166 /* An ARM instruction record contains the opcode of the current insn
10167 and its execution state (filled in before entry to decode_insn ()), plus the
10168 list of to-be-modified registers and
10169 memory blocks (filled in on return from decode_insn ()).  */
10170
10171 typedef struct insn_decode_record_t
10172 {
10173 struct gdbarch *gdbarch;
10174 struct regcache *regcache;
10175 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10176 uint32_t arm_insn; /* Should accommodate thumb. */
10177 uint32_t cond; /* Condition code. */
10178 uint32_t opcode; /* Insn opcode. */
10179 uint32_t decode; /* Insn decode bits. */
10180 uint32_t mem_rec_count; /* Number of memory records.  */
10181 uint32_t reg_rec_count; /* Number of register records.  */
10182 uint32_t *arm_regs; /* Registers to be saved for this record. */
10183 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10184 } insn_decode_record;
10185
10186
10187 /* Checks ARM SBZ and SBO mandatory fields. */
10188
10189 static int
10190 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10191 {
10192 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
10193
10194 if (!len)
10195 return 1;
10196
10197 if (!sbo)
10198 ones = ~ones;
10199
10200 while (ones)
10201 {
10202 if (!(ones & sbo))
10203 {
10204 return 0;
10205 }
10206 ones = ones >> 1;
10207 }
10208 return 1;
10209 }
10210
10211 enum arm_record_result
10212 {
10213 ARM_RECORD_SUCCESS = 0,
10214 ARM_RECORD_FAILURE = 1
10215 };
10216
10217 typedef enum
10218 {
10219 ARM_RECORD_STRH=1,
10220 ARM_RECORD_STRD
10221 } arm_record_strx_t;
10222
10223 typedef enum
10224 {
10225 ARM_RECORD=1,
10226 THUMB_RECORD,
10227 THUMB2_RECORD
10228 } record_type_t;
10229
10230
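/* Helper for the misc store insns STRH and STRD: record the memory
   locations written (via RECORD_BUF_MEM) and, for the pre/post-indexed
   forms, the updated base register Rn (via RECORD_BUF).  */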
10231 static int
10232 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10233 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10234 {
10235
10236 struct regcache *reg_cache = arm_insn_r->regcache;
10237 ULONGEST u_regval[2]= {0};
10238
10239 uint32_t reg_src1 = 0, reg_src2 = 0;
10240 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10241
10242 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10243 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10244
10245 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10246 {
10247 /* 1) Handle misc store, immediate offset. */
10248 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10249 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10250 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10251 regcache_raw_read_unsigned (reg_cache, reg_src1,
10252 &u_regval[0]);
10253 if (ARM_PC_REGNUM == reg_src1)
10254 {
10255 /* If R15 was used as Rn, the effective value is the current PC + 8.  */
10256 u_regval[0] = u_regval[0] + 8;
10257 }
10258 offset_8 = (immed_high << 4) | immed_low;
10259 /* Calculate target store address. */
10260 if (14 == arm_insn_r->opcode)
10261 {
10262 tgt_mem_addr = u_regval[0] + offset_8;
10263 }
10264 else
10265 {
10266 tgt_mem_addr = u_regval[0] - offset_8;
10267 }
10268 if (ARM_RECORD_STRH == str_type)
10269 {
10270 record_buf_mem[0] = 2;
10271 record_buf_mem[1] = tgt_mem_addr;
10272 arm_insn_r->mem_rec_count = 1;
10273 }
10274 else if (ARM_RECORD_STRD == str_type)
10275 {
10276 record_buf_mem[0] = 4;
10277 record_buf_mem[1] = tgt_mem_addr;
10278 record_buf_mem[2] = 4;
10279 record_buf_mem[3] = tgt_mem_addr + 4;
10280 arm_insn_r->mem_rec_count = 2;
10281 }
10282 }
10283 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10284 {
10285 /* 2) Store, register offset. */
10286 /* Get Rm. */
10287 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10288 /* Get Rn. */
10289 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10290 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10291 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10292 if (15 == reg_src2)
10293 {
10294 /* If R15 was used as Rn, the effective value is the current PC + 8.  */
10295 u_regval[0] = u_regval[0] + 8;
10296 }
10297 /* Calculate target store address, Rn +/- Rm, register offset. */
10298 if (12 == arm_insn_r->opcode)
10299 {
10300 tgt_mem_addr = u_regval[0] + u_regval[1];
10301 }
10302 else
10303 {
10304 tgt_mem_addr = u_regval[1] - u_regval[0];
10305 }
10306 if (ARM_RECORD_STRH == str_type)
10307 {
10308 record_buf_mem[0] = 2;
10309 record_buf_mem[1] = tgt_mem_addr;
10310 arm_insn_r->mem_rec_count = 1;
10311 }
10312 else if (ARM_RECORD_STRD == str_type)
10313 {
10314 record_buf_mem[0] = 4;
10315 record_buf_mem[1] = tgt_mem_addr;
10316 record_buf_mem[2] = 4;
10317 record_buf_mem[3] = tgt_mem_addr + 4;
10318 arm_insn_r->mem_rec_count = 2;
10319 }
10320 }
10321 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10322 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10323 {
10324 /* 3) Store, immediate pre-indexed. */
10325 /* 5) Store, immediate post-indexed. */
10326 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10327 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10328 offset_8 = (immed_high << 4) | immed_low;
10329 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10330 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10331 /* Calculate target store address, Rn +/- offset_8, immediate offset.  */
10332 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10333 {
10334 tgt_mem_addr = u_regval[0] + offset_8;
10335 }
10336 else
10337 {
10338 tgt_mem_addr = u_regval[0] - offset_8;
10339 }
10340 if (ARM_RECORD_STRH == str_type)
10341 {
10342 record_buf_mem[0] = 2;
10343 record_buf_mem[1] = tgt_mem_addr;
10344 arm_insn_r->mem_rec_count = 1;
10345 }
10346 else if (ARM_RECORD_STRD == str_type)
10347 {
10348 record_buf_mem[0] = 4;
10349 record_buf_mem[1] = tgt_mem_addr;
10350 record_buf_mem[2] = 4;
10351 record_buf_mem[3] = tgt_mem_addr + 4;
10352 arm_insn_r->mem_rec_count = 2;
10353 }
10354 /* Record Rn also as it changes. */
10355 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10356 arm_insn_r->reg_rec_count = 1;
10357 }
10358 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10359 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10360 {
10361 /* 4) Store, register pre-indexed. */
10362 /* 6) Store, register post-indexed.  */
10363 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10364 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10365 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10366 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10367 /* Calculate target store address, Rn +/- Rm, register offset. */
10368 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10369 {
10370 tgt_mem_addr = u_regval[0] + u_regval[1];
10371 }
10372 else
10373 {
10374 tgt_mem_addr = u_regval[1] - u_regval[0];
10375 }
10376 if (ARM_RECORD_STRH == str_type)
10377 {
10378 record_buf_mem[0] = 2;
10379 record_buf_mem[1] = tgt_mem_addr;
10380 arm_insn_r->mem_rec_count = 1;
10381 }
10382 else if (ARM_RECORD_STRD == str_type)
10383 {
10384 record_buf_mem[0] = 4;
10385 record_buf_mem[1] = tgt_mem_addr;
10386 record_buf_mem[2] = 4;
10387 record_buf_mem[3] = tgt_mem_addr + 4;
10388 arm_insn_r->mem_rec_count = 2;
10389 }
10390 /* Record Rn also as it changes. */
10391 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10392 arm_insn_r->reg_rec_count = 1;
10393 }
10394 return 0;
10395 }
10396
10397 /* Handling ARM extension space insns. */
10398
10399 static int
10400 arm_record_extension_space (insn_decode_record *arm_insn_r)
10401 {
10402 int ret = 0; /* Return value: -1: record failure; 0: success.  */
10403 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10404 uint32_t record_buf[8], record_buf_mem[8];
10405 uint32_t reg_src1 = 0;
10406 struct regcache *reg_cache = arm_insn_r->regcache;
10407 ULONGEST u_regval = 0;
10408
10409 gdb_assert (!INSN_RECORDED(arm_insn_r));
10410 /* Handle unconditional insn extension space. */
10411
10412 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10413 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10414 if (arm_insn_r->cond)
10415 {
10416 /* PLD has no effect on architectural state; it just affects
10417 the caches. */
10418 if (5 == ((opcode1 & 0xE0) >> 5))
10419 {
10420 /* BLX(1) */
10421 record_buf[0] = ARM_PS_REGNUM;
10422 record_buf[1] = ARM_LR_REGNUM;
10423 arm_insn_r->reg_rec_count = 2;
10424 }
10425 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10426 }
10427
10428
10429 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10430 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10431 {
10432 ret = -1;
10433 /* Undefined instruction on ARM V5; need to handle if later
10434 versions define it. */
10435 }
10436
10437 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10438 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10439 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10440
10441 /* Handle arithmetic insn extension space. */
10442 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10443 && !INSN_RECORDED(arm_insn_r))
10444 {
10445 /* Handle MLA(S) and MUL(S). */
10446 if (in_inclusive_range (insn_op1, 0U, 3U))
10447 {
10448 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10449 record_buf[1] = ARM_PS_REGNUM;
10450 arm_insn_r->reg_rec_count = 2;
10451 }
10452 else if (in_inclusive_range (insn_op1, 4U, 15U))
10453 {
10454 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10455 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10456 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10457 record_buf[2] = ARM_PS_REGNUM;
10458 arm_insn_r->reg_rec_count = 3;
10459 }
10460 }
10461
10462 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10463 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10464 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10465
10466 /* Handle control insn extension space. */
10467
10468 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10469 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10470 {
10471 if (!bit (arm_insn_r->arm_insn,25))
10472 {
10473 if (!bits (arm_insn_r->arm_insn, 4, 7))
10474 {
10475 if ((0 == insn_op1) || (2 == insn_op1))
10476 {
10477 /* MRS. */
10478 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10479 arm_insn_r->reg_rec_count = 1;
10480 }
10481 else if (1 == insn_op1)
10482 {
10483 /* CPSR is going to be changed.  */
10484 record_buf[0] = ARM_PS_REGNUM;
10485 arm_insn_r->reg_rec_count = 1;
10486 }
10487 else if (3 == insn_op1)
10488 {
10489 /* SPSR is going to be changed. */
10490 /* We need to get SPSR value, which is yet to be done. */
10491 return -1;
10492 }
10493 }
10494 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10495 {
10496 if (1 == insn_op1)
10497 {
10498 /* BX. */
10499 record_buf[0] = ARM_PS_REGNUM;
10500 arm_insn_r->reg_rec_count = 1;
10501 }
10502 else if (3 == insn_op1)
10503 {
10504 /* CLZ. */
10505 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10506 arm_insn_r->reg_rec_count = 1;
10507 }
10508 }
10509 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10510 {
10511 /* BLX. */
10512 record_buf[0] = ARM_PS_REGNUM;
10513 record_buf[1] = ARM_LR_REGNUM;
10514 arm_insn_r->reg_rec_count = 2;
10515 }
10516 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10517 {
10518 /* QADD, QSUB, QDADD, QDSUB */
10519 record_buf[0] = ARM_PS_REGNUM;
10520 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10521 arm_insn_r->reg_rec_count = 2;
10522 }
10523 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10524 {
10525 /* BKPT. */
10526 record_buf[0] = ARM_PS_REGNUM;
10527 record_buf[1] = ARM_LR_REGNUM;
10528 arm_insn_r->reg_rec_count = 2;
10529
10530 /* Save SPSR also; how?  */
10531 return -1;
10532 }
10533 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10534 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10535 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10536 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10537 )
10538 {
10539 if (0 == insn_op1 || 1 == insn_op1)
10540 {
10541 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10542 /* We don't optimize for SMULW<y>, where only Rd
10543 would need to be recorded.  */
10544 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10545 record_buf[1] = ARM_PS_REGNUM;
10546 arm_insn_r->reg_rec_count = 2;
10547 }
10548 else if (2 == insn_op1)
10549 {
10550 /* SMLAL<x><y>. */
10551 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10552 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10553 arm_insn_r->reg_rec_count = 2;
10554 }
10555 else if (3 == insn_op1)
10556 {
10557 /* SMUL<x><y>. */
10558 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10559 arm_insn_r->reg_rec_count = 1;
10560 }
10561 }
10562 }
10563 else
10564 {
10565 /* MSR : immediate form. */
10566 if (1 == insn_op1)
10567 {
10568 /* CPSR is going to be changed.  */
10569 record_buf[0] = ARM_PS_REGNUM;
10570 arm_insn_r->reg_rec_count = 1;
10571 }
10572 else if (3 == insn_op1)
10573 {
10574 /* SPSR is going to be changed. */
10575 /* We need to get the SPSR value, which is yet to be done.  */
10576 return -1;
10577 }
10578 }
10579 }
10580
10581 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10582 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10583 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10584
10585 /* Handle load/store insn extension space. */
10586
10587 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10588 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10589 && !INSN_RECORDED(arm_insn_r))
10590 {
10591 /* SWP/SWPB. */
10592 if (0 == insn_op1)
10593 {
10594 /* This insn changes a register and memory as well.  */
10595 /* SWP or SWPB insn. */
10596 /* Get memory address given by Rn. */
10597 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10598 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10599 /* SWP insn swaps a whole word.  */
10600 if (8 == arm_insn_r->opcode)
10601 {
10602 record_buf_mem[0] = 4;
10603 }
10604 else
10605 {
10606 /* SWPB insn, swaps only byte. */
10607 record_buf_mem[0] = 1;
10608 }
10609 record_buf_mem[1] = u_regval;
10610 arm_insn_r->mem_rec_count = 1;
10611 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10612 arm_insn_r->reg_rec_count = 1;
10613 }
10614 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10615 {
10616 /* STRH. */
10617 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10618 ARM_RECORD_STRH);
10619 }
10620 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10621 {
10622 /* LDRD. */
10623 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10624 record_buf[1] = record_buf[0] + 1;
10625 arm_insn_r->reg_rec_count = 2;
10626 }
10627 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10628 {
10629 /* STRD. */
10630 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10631 ARM_RECORD_STRD);
10632 }
10633 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10634 {
10635 /* LDRH, LDRSB, LDRSH. */
10636 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10637 arm_insn_r->reg_rec_count = 1;
10638 }
10639
10640 }
10641
10642 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10643 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10644 && !INSN_RECORDED(arm_insn_r))
10645 {
10646 ret = -1;
10647 /* Handle coprocessor insn extension space. */
10648 }
10649
10650 /* To be done for ARMv5 and later; as of now we return -1. */
10651 if (-1 == ret)
10652 return ret;
10653
10654 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10655 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10656
10657 return ret;
10658 }
10659
10660 /* Handling opcode 000 insns. */
10661
10662 static int
10663 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10664 {
10665 struct regcache *reg_cache = arm_insn_r->regcache;
10666 uint32_t record_buf[8], record_buf_mem[8];
10667 ULONGEST u_regval[2] = {0};
10668
10669 uint32_t reg_src1 = 0;
10670 uint32_t opcode1 = 0;
10671
10672 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10673 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10674 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10675
10676 if (!((opcode1 & 0x19) == 0x10))
10677 {
10678 /* Data-processing (register) and data-processing (register-shifted
10679 register). */
10680 /* In all 11 shifter operand modes the insn modifies its destination
10681 register, which is given by bits 12-15. */
10682 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10683 record_buf[1] = ARM_PS_REGNUM;
10684 arm_insn_r->reg_rec_count = 2;
10685 }
10686 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10687 {
10688 /* Miscellaneous instructions */
10689
10690 if (3 == arm_insn_r->decode && 0x12 == opcode1
10691 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10692 {
10693 /* Handle BLX, branch and link/exchange. */
10694 if (9 == arm_insn_r->opcode)
10695 {
10696 /* The branch target state is chosen by bit[0] of Rm, which sets the T
10697 bit of the CPSR, and R14 stores the return address. */
10698 record_buf[0] = ARM_PS_REGNUM;
10699 record_buf[1] = ARM_LR_REGNUM;
10700 arm_insn_r->reg_rec_count = 2;
10701 }
10702 }
10703 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10704 {
10705 /* Handle enhanced software breakpoint insn, BKPT. */
10706 /* The CPSR is changed so that execution continues in ARM state with
10707 normal interrupts disabled, entering abort mode. */
10708 /* The PC is set according to the high vector configuration. */
10709 /* If the user hits the breakpoint and then reverses execution, we
10710 need to go back with the previous CPSR and
10711 Program Counter. */
10712 record_buf[0] = ARM_PS_REGNUM;
10713 record_buf[1] = ARM_LR_REGNUM;
10714 arm_insn_r->reg_rec_count = 2;
10715
10716 /* The SPSR should also be saved; this is not yet supported. */
10717 return -1;
10718 }
10719 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10720 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10721 {
10722 /* Handle BX, branch and exchange. */
10723 /* The branch target state is chosen by bit[0] of Rm, which sets the T bit of the CPSR. */
10724 record_buf[0] = ARM_PS_REGNUM;
10725 arm_insn_r->reg_rec_count = 1;
10726 }
10727 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10728 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10729 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10730 {
10731 /* Count leading zeros: CLZ. */
10732 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10733 arm_insn_r->reg_rec_count = 1;
10734 }
10735 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10736 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10737 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10738 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10739 {
10740 /* Handle MRS insn. */
10741 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10742 arm_insn_r->reg_rec_count = 1;
10743 }
10744 }
10745 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10746 {
10747 /* Multiply and multiply-accumulate */
10748
10749 /* Handle multiply instructions. */
10750 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10751 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10752 {
10753 /* Handle MLA and MUL. */
10754 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10755 record_buf[1] = ARM_PS_REGNUM;
10756 arm_insn_r->reg_rec_count = 2;
10757 }
10758 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10759 {
10760 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10761 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10762 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10763 record_buf[2] = ARM_PS_REGNUM;
10764 arm_insn_r->reg_rec_count = 3;
10765 }
10766 }
10767 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10768 {
10769 /* Synchronization primitives */
10770
10771 /* Handling SWP, SWPB. */
10772 /* These insns change both a register and memory. */
10773 /* SWP or SWPB insn. */
10774
10775 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10776 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10777 /* Is this a SWP insn?  If so, it swaps a word. */
10778 if (8 == arm_insn_r->opcode)
10779 {
10780 record_buf_mem[0] = 4;
10781 }
10782 else
10783 {
10784 /* SWPB insn swaps only a byte. */
10785 record_buf_mem[0] = 1;
10786 }
10787 record_buf_mem[1] = u_regval[0];
10788 arm_insn_r->mem_rec_count = 1;
10789 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10790 arm_insn_r->reg_rec_count = 1;
10791 }
10792 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10793 || 15 == arm_insn_r->decode)
10794 {
10795 if ((opcode1 & 0x12) == 2)
10796 {
10797 /* Extra load/store (unprivileged) */
10798 return -1;
10799 }
10800 else
10801 {
10802 /* Extra load/store */
10803 switch (bits (arm_insn_r->arm_insn, 5, 6))
10804 {
10805 case 1:
10806 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10807 {
10808 /* STRH (register), STRH (immediate) */
10809 arm_record_strx (arm_insn_r, &record_buf[0],
10810 &record_buf_mem[0], ARM_RECORD_STRH);
10811 }
10812 else if ((opcode1 & 0x05) == 0x1)
10813 {
10814 /* LDRH (register) */
10815 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10816 arm_insn_r->reg_rec_count = 1;
10817
10818 if (bit (arm_insn_r->arm_insn, 21))
10819 {
10820 /* Write back to Rn. */
10821 record_buf[arm_insn_r->reg_rec_count++]
10822 = bits (arm_insn_r->arm_insn, 16, 19);
10823 }
10824 }
10825 else if ((opcode1 & 0x05) == 0x5)
10826 {
10827 /* LDRH (immediate), LDRH (literal) */
10828 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10829
10830 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10831 arm_insn_r->reg_rec_count = 1;
10832
10833 if (rn != 15)
10834 {
10835 /* LDRH (immediate). */
10836 if (bit (arm_insn_r->arm_insn, 21))
10837 {
10838 /* Write back to Rn. */
10839 record_buf[arm_insn_r->reg_rec_count++] = rn;
10840 }
10841 }
10842 }
10843 else
10844 return -1;
10845 break;
10846 case 2:
10847 if ((opcode1 & 0x05) == 0x0)
10848 {
10849 /* LDRD (register) */
10850 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10851 record_buf[1] = record_buf[0] + 1;
10852 arm_insn_r->reg_rec_count = 2;
10853
10854 if (bit (arm_insn_r->arm_insn, 21))
10855 {
10856 /* Write back to Rn. */
10857 record_buf[arm_insn_r->reg_rec_count++]
10858 = bits (arm_insn_r->arm_insn, 16, 19);
10859 }
10860 }
10861 else if ((opcode1 & 0x05) == 0x1)
10862 {
10863 /* LDRSB (register) */
10864 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10865 arm_insn_r->reg_rec_count = 1;
10866
10867 if (bit (arm_insn_r->arm_insn, 21))
10868 {
10869 /* Write back to Rn. */
10870 record_buf[arm_insn_r->reg_rec_count++]
10871 = bits (arm_insn_r->arm_insn, 16, 19);
10872 }
10873 }
10874 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10875 {
10876 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10877 LDRSB (literal) */
10878 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10879
10880 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10881 arm_insn_r->reg_rec_count = 1;
10882
10883 if (rn != 15)
10884 {
10885 /* LDRD (immediate), LDRSB (immediate). */
10886 if (bit (arm_insn_r->arm_insn, 21))
10887 {
10888 /* Write back to Rn. */
10889 record_buf[arm_insn_r->reg_rec_count++] = rn;
10890 }
10891 }
10892 }
10893 else
10894 return -1;
10895 break;
10896 case 3:
10897 if ((opcode1 & 0x05) == 0x0)
10898 {
10899 /* STRD (register) */
10900 arm_record_strx (arm_insn_r, &record_buf[0],
10901 &record_buf_mem[0], ARM_RECORD_STRD);
10902 }
10903 else if ((opcode1 & 0x05) == 0x1)
10904 {
10905 /* LDRSH (register) */
10906 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10907 arm_insn_r->reg_rec_count = 1;
10908
10909 if (bit (arm_insn_r->arm_insn, 21))
10910 {
10911 /* Write back to Rn. */
10912 record_buf[arm_insn_r->reg_rec_count++]
10913 = bits (arm_insn_r->arm_insn, 16, 19);
10914 }
10915 }
10916 else if ((opcode1 & 0x05) == 0x4)
10917 {
10918 /* STRD (immediate) */
10919 arm_record_strx (arm_insn_r, &record_buf[0],
10920 &record_buf_mem[0], ARM_RECORD_STRD);
10921 }
10922 else if ((opcode1 & 0x05) == 0x5)
10923 {
10924 /* LDRSH (immediate), LDRSH (literal) */
10925 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10926 arm_insn_r->reg_rec_count = 1;
10927
10928 if (bit (arm_insn_r->arm_insn, 21))
10929 {
10930 /* Write back to Rn. */
10931 record_buf[arm_insn_r->reg_rec_count++]
10932 = bits (arm_insn_r->arm_insn, 16, 19);
10933 }
10934 }
10935 else
10936 return -1;
10937 break;
10938 default:
10939 return -1;
10940 }
10941 }
10942 }
10943 else
10944 {
10945 return -1;
10946 }
10947
10948 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10949 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10950 return 0;
10951 }
10952
10953 /* Handling opcode 001 insns. */
10954
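/* For illustration: an immediate-form MSR such as
   "MSR CPSR_f, #0xF0000000" only needs the CPSR recorded, while an
   ordinary immediate data-processing insn such as "MOV r0, #1" records
   the destination register from bits 12-15 plus the CPSR, matching the
   first two branches below.  */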
10955 static int
10956 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10957 {
10958 uint32_t record_buf[8], record_buf_mem[8];
10959
10960 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10961 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10962
10963 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10964 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10965 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10966 )
10967 {
10968 /* Handle MSR insn. */
10969 if (9 == arm_insn_r->opcode)
10970 {
10971 /* CPSR is going to be changed. */
10972 record_buf[0] = ARM_PS_REGNUM;
10973 arm_insn_r->reg_rec_count = 1;
10974 }
10975 else
10976 {
10977 /* SPSR is going to be changed. */
10978 }
10979 }
10980 else if (arm_insn_r->opcode <= 15)
10981 {
10982 /* Normal data processing insns. */
10983 /* In all 11 shifter operand modes the insn modifies its destination
10984 register, which is given by bits 12-15. */
10985 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10986 record_buf[1] = ARM_PS_REGNUM;
10987 arm_insn_r->reg_rec_count = 2;
10988 }
10989 else
10990 {
10991 return -1;
10992 }
10993
10994 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10995 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10996 return 0;
10997 }
10998
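/* Media instructions (parallel add/sub, packing, signed multiplies and
   bit-field operations) modify core registers only, never memory.  For
   example, "UBFX r0, r1, #4, #8" lands in the bit-field group (case 7)
   and only its destination r0 (bits 12-15) needs to be recorded.  */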
10999 static int
11000 arm_record_media (insn_decode_record *arm_insn_r)
11001 {
11002 uint32_t record_buf[8];
11003
11004 switch (bits (arm_insn_r->arm_insn, 22, 24))
11005 {
11006 case 0:
11007 /* Parallel addition and subtraction, signed */
11008 case 1:
11009 /* Parallel addition and subtraction, unsigned */
11010 case 2:
11011 case 3:
11012 /* Packing, unpacking, saturation and reversal */
11013 {
11014 int rd = bits (arm_insn_r->arm_insn, 12, 15);
11015
11016 record_buf[arm_insn_r->reg_rec_count++] = rd;
11017 }
11018 break;
11019
11020 case 4:
11021 case 5:
11022 /* Signed multiplies */
11023 {
11024 int rd = bits (arm_insn_r->arm_insn, 16, 19);
11025 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
11026
11027 record_buf[arm_insn_r->reg_rec_count++] = rd;
11028 if (op1 == 0x0)
11029 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11030 else if (op1 == 0x4)
11031 record_buf[arm_insn_r->reg_rec_count++]
11032 = bits (arm_insn_r->arm_insn, 12, 15);
11033 }
11034 break;
11035
11036 case 6:
11037 {
11038 if (bit (arm_insn_r->arm_insn, 21)
11039 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
11040 {
11041 /* SBFX */
11042 record_buf[arm_insn_r->reg_rec_count++]
11043 = bits (arm_insn_r->arm_insn, 12, 15);
11044 }
11045 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
11046 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
11047 {
11048 /* USAD8 and USADA8 */
11049 record_buf[arm_insn_r->reg_rec_count++]
11050 = bits (arm_insn_r->arm_insn, 16, 19);
11051 }
11052 }
11053 break;
11054
11055 case 7:
11056 {
11057 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
11058 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
11059 {
11060 /* Permanently UNDEFINED */
11061 return -1;
11062 }
11063 else
11064 {
11065 /* BFC, BFI and UBFX */
11066 record_buf[arm_insn_r->reg_rec_count++]
11067 = bits (arm_insn_r->arm_insn, 12, 15);
11068 }
11069 }
11070 break;
11071
11072 default:
11073 return -1;
11074 }
11075
11076 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11077
11078 return 0;
11079 }
11080
11081 /* Handle ARM mode instructions with opcode 010. */
11082
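/* A worked example of what this handler records: for
   "STR r1, [r2, #8]!" (pre-indexed with writeback) the insn writes four
   bytes at r2 + 8 and updates r2, so record_buf_mem receives the length
   and target address and record_buf receives the base register; a load
   such as "LDR r0, [r2, #8]" instead records only the destination r0.  */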
11083 static int
11084 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11085 {
11086 struct regcache *reg_cache = arm_insn_r->regcache;
11087
11088 uint32_t reg_base, reg_dest;
11089 uint32_t offset_12, tgt_mem_addr;
11090 uint32_t record_buf[8], record_buf_mem[8];
11091 unsigned char wback;
11092 ULONGEST u_regval;
11093
11094 /* Calculate wback. */
11095 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11096 || (bit (arm_insn_r->arm_insn, 21) == 1);
11097
11098 arm_insn_r->reg_rec_count = 0;
11099 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11100
11101 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11102 {
11103 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11104 and LDRT. */
11105
11106 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11107 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11108
11109 /* The LDR instruction is capable of doing branching.  If MOV LR, PC
11110 precedes an LDR instruction that loads R15, the pair
11111 emulates a branch and link instruction, and hence we need to save
11112 CPSR and PC as well. */
11113 if (ARM_PC_REGNUM == reg_dest)
11114 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11115
11116 /* If wback is true, also save the base register, which is going to be
11117 written to. */
11118 if (wback)
11119 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11120 }
11121 else
11122 {
11123 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11124
11125 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11126 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11127
11128 /* Handle bit U. */
11129 if (bit (arm_insn_r->arm_insn, 23))
11130 {
11131 /* U == 1: Add the offset. */
11132 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11133 }
11134 else
11135 {
11136 /* U == 0: subtract the offset. */
11137 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11138 }
11139
11140 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11141 bytes. */
11142 if (bit (arm_insn_r->arm_insn, 22))
11143 {
11144 /* STRB and STRBT: 1 byte. */
11145 record_buf_mem[0] = 1;
11146 }
11147 else
11148 {
11149 /* STR and STRT: 4 bytes. */
11150 record_buf_mem[0] = 4;
11151 }
11152
11153 /* Handle bit P. */
11154 if (bit (arm_insn_r->arm_insn, 24))
11155 record_buf_mem[1] = tgt_mem_addr;
11156 else
11157 record_buf_mem[1] = (uint32_t) u_regval;
11158
11159 arm_insn_r->mem_rec_count = 1;
11160
11161 /* If wback is true, also save the base register, which is going to be
11162 written to. */
11163 if (wback)
11164 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11165 }
11166
11167 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11168 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11169 return 0;
11170 }
11171
11172 /* Handling opcode 011 insns. */
11173
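/* For the register-offset stores handled here the target address must
   be computed up front so the soon-to-be-overwritten memory can be
   saved.  For example, "STR r1, [r2, r3, LSL #2]" stores four bytes at
   r2 + (r3 << 2): the shift type comes from bits 5-6, the shift amount
   from bits 7-11, and Rm/Rn from bits 0-3 and 16-19 respectively.  */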
11174 static int
11175 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11176 {
11177 struct regcache *reg_cache = arm_insn_r->regcache;
11178
11179 uint32_t shift_imm = 0;
11180 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11181 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11182 uint32_t record_buf[8], record_buf_mem[8];
11183
11184 LONGEST s_word;
11185 ULONGEST u_regval[2];
11186
11187 if (bit (arm_insn_r->arm_insn, 4))
11188 return arm_record_media (arm_insn_r);
11189
11190 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11191 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11192
11193 /* Handle enhanced store insns and the LDRD DSP insn; the ordering
11194 below follows the addressing modes of the store insns, starting
11195 with the STRH insn. */
11196
11197 /* LDR or STR? */
11198 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11199 {
11200 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11201 /* An LDR insn is capable of branching: if
11202 MOV LR, PC precedes an LDR insn that loads R15,
11203 the pair emulates a branch and link insn, and hence we
11204 need to save CPSR and PC as well. */
11205 if (15 != reg_dest)
11206 {
11207 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11208 arm_insn_r->reg_rec_count = 1;
11209 }
11210 else
11211 {
11212 record_buf[0] = reg_dest;
11213 record_buf[1] = ARM_PS_REGNUM;
11214 arm_insn_r->reg_rec_count = 2;
11215 }
11216 }
11217 else
11218 {
11219 if (! bits (arm_insn_r->arm_insn, 4, 11))
11220 {
11221 /* Store insn, register offset and register pre-indexed,
11222 register post-indexed. */
11223 /* Get Rm. */
11224 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11225 /* Get Rn. */
11226 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11227 regcache_raw_read_unsigned (reg_cache, reg_src1
11228 , &u_regval[0]);
11229 regcache_raw_read_unsigned (reg_cache, reg_src2
11230 , &u_regval[1]);
11231 if (15 == reg_src2)
11232 {
11233 /* If R15 was used as Rn, the operand value is the current PC+8. */
11234 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
11235 u_regval[0] = u_regval[0] + 8;
11236 }
11237 /* Calculate target store address, Rn +/- Rm, register offset. */
11238 /* U == 1. */
11239 if (bit (arm_insn_r->arm_insn, 23))
11240 {
11241 tgt_mem_addr = u_regval[0] + u_regval[1];
11242 }
11243 else
11244 {
11245 tgt_mem_addr = u_regval[1] - u_regval[0];
11246 }
11247
11248 switch (arm_insn_r->opcode)
11249 {
11250 /* STR. */
11251 case 8:
11252 case 12:
11253 /* STR. */
11254 case 9:
11255 case 13:
11256 /* STRT. */
11257 case 1:
11258 case 5:
11259 /* STR. */
11260 case 0:
11261 case 4:
11262 record_buf_mem[0] = 4;
11263 break;
11264
11265 /* STRB. */
11266 case 10:
11267 case 14:
11268 /* STRB. */
11269 case 11:
11270 case 15:
11271 /* STRBT. */
11272 case 3:
11273 case 7:
11274 /* STRB. */
11275 case 2:
11276 case 6:
11277 record_buf_mem[0] = 1;
11278 break;
11279
11280 default:
11281 gdb_assert_not_reached ("no decoding pattern found");
11282 break;
11283 }
11284 record_buf_mem[1] = tgt_mem_addr;
11285 arm_insn_r->mem_rec_count = 1;
11286
11287 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11288 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11289 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11290 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11291 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11292 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11293 )
11294 {
11295 /* Rn is going to be changed in pre-indexed mode and
11296 post-indexed mode as well. */
11297 record_buf[0] = reg_src2;
11298 arm_insn_r->reg_rec_count = 1;
11299 }
11300 }
11301 else
11302 {
11303 /* Store insn, scaled register offset; scaled pre-indexed. */
11304 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11305 /* Get Rm. */
11306 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11307 /* Get Rn. */
11308 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11309 /* Get shift_imm. */
11310 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11311 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11312 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11313 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11314 /* offset_12 currently holds the shift type (bits 5-6). */
11315 switch (offset_12)
11316 {
11317 case 0:
11318 /* Offset_12 used as index. */
11319 offset_12 = u_regval[0] << shift_imm;
11320 break;
11321
11322 case 1:
11323 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11324 break;
11325
11326 case 2:
11327 if (!shift_imm)
11328 {
11329 if (bit (u_regval[0], 31))
11330 {
11331 offset_12 = 0xFFFFFFFF;
11332 }
11333 else
11334 {
11335 offset_12 = 0;
11336 }
11337 }
11338 else
11339 {
11340 /* This is an arithmetic shift. */
11341 offset_12 = s_word >> shift_imm;
11342 }
11343 break;
11344
11345 case 3:
11346 if (!shift_imm)
11347 {
11348 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11349 &u_regval[1]);
11350 /* Get C flag value and shift it by 31. */
11351 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11352 | (u_regval[0]) >> 1);
11353 }
11354 else
11355 {
11356 /* Rotate Rm right by shift_imm bits. */
11357 offset_12 = (u_regval[0] >> shift_imm)
11358 | (u_regval[0] << (32 - shift_imm));
11359 }
11360 break;
11361
11362 default:
11363 gdb_assert_not_reached ("no decoding pattern found");
11364 break;
11365 }
11366
11367 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11368 /* bit U set. */
11369 if (bit (arm_insn_r->arm_insn, 23))
11370 {
11371 tgt_mem_addr = u_regval[1] + offset_12;
11372 }
11373 else
11374 {
11375 tgt_mem_addr = u_regval[1] - offset_12;
11376 }
11377
11378 switch (arm_insn_r->opcode)
11379 {
11380 /* STR. */
11381 case 8:
11382 case 12:
11383 /* STR. */
11384 case 9:
11385 case 13:
11386 /* STRT. */
11387 case 1:
11388 case 5:
11389 /* STR. */
11390 case 0:
11391 case 4:
11392 record_buf_mem[0] = 4;
11393 break;
11394
11395 /* STRB. */
11396 case 10:
11397 case 14:
11398 /* STRB. */
11399 case 11:
11400 case 15:
11401 /* STRBT. */
11402 case 3:
11403 case 7:
11404 /* STRB. */
11405 case 2:
11406 case 6:
11407 record_buf_mem[0] = 1;
11408 break;
11409
11410 default:
11411 gdb_assert_not_reached ("no decoding pattern found");
11412 break;
11413 }
11414 record_buf_mem[1] = tgt_mem_addr;
11415 arm_insn_r->mem_rec_count = 1;
11416
11417 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11418 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11419 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11420 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11421 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11422 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11423 )
11424 {
11425 /* Rn is going to be changed in register scaled pre-indexed
11426 mode, and in scaled post-indexed mode. */
11427 record_buf[0] = reg_src2;
11428 arm_insn_r->reg_rec_count = 1;
11429 }
11430 }
11431 }
11432
11433 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11434 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11435 return 0;
11436 }
11437
11438 /* Handle ARM mode instructions with opcode 100. */
11439
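/* Worked example for the block transfers below: "STMDB sp!, {r4-r7, lr}"
   stores five registers, so 20 bytes starting at sp - 20 are recorded as
   modified memory and the writeback flag (bit 21) additionally records
   the base register sp.  A load such as "LDMIA r0, {r1, r2}" records
   only the loaded registers, plus the CPSR which this handler always
   saves for loads.  */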
11440 static int
11441 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11442 {
11443 struct regcache *reg_cache = arm_insn_r->regcache;
11444 uint32_t register_count = 0, register_bits;
11445 uint32_t reg_base, addr_mode;
11446 uint32_t record_buf[24], record_buf_mem[48];
11447 uint32_t wback;
11448 ULONGEST u_regval;
11449
11450 /* Fetch the list of registers. */
11451 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11452 arm_insn_r->reg_rec_count = 0;
11453
11454 /* Fetch the base register that contains the address we are loading data
11455 to. */
11456 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11457
11458 /* Calculate wback. */
11459 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11460
11461 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11462 {
11463 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11464
11465 /* Find out which registers are going to be loaded from memory. */
11466 while (register_bits)
11467 {
11468 if (register_bits & 0x00000001)
11469 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11470 register_bits = register_bits >> 1;
11471 register_count++;
11472 }
11473
11474
11475 /* If wback is true, also save the base register, which is going to be
11476 written to. */
11477 if (wback)
11478 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11479
11480 /* Save the CPSR register. */
11481 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11482 }
11483 else
11484 {
11485 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11486
11487 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11488
11489 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11490
11491 /* Find out how many registers are going to be stored to memory. */
11492 while (register_bits)
11493 {
11494 if (register_bits & 0x00000001)
11495 register_count++;
11496 register_bits = register_bits >> 1;
11497 }
11498
11499 switch (addr_mode)
11500 {
11501 /* STMDA (STMED): Decrement after. */
11502 case 0:
11503 record_buf_mem[1] = (uint32_t) u_regval
11504 - register_count * ARM_INT_REGISTER_SIZE + 4;
11505 break;
11506 /* STM (STMIA, STMEA): Increment after. */
11507 case 1:
11508 record_buf_mem[1] = (uint32_t) u_regval;
11509 break;
11510 /* STMDB (STMFD): Decrement before. */
11511 case 2:
11512 record_buf_mem[1] = (uint32_t) u_regval
11513 - register_count * ARM_INT_REGISTER_SIZE;
11514 break;
11515 /* STMIB (STMFA): Increment before. */
11516 case 3:
11517 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11518 break;
11519 default:
11520 gdb_assert_not_reached ("no decoding pattern found");
11521 break;
11522 }
11523
11524 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11525 arm_insn_r->mem_rec_count = 1;
11526
11527 /* If wback is true, also save the base register, which is going to be
11528 written to. */
11529 if (wback)
11530 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11531 }
11532
11533 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11534 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11535 return 0;
11536 }
11537
11538 /* Handling opcode 101 insns. */
11539
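/* Branches need no memory recording: "BL func" (bit 24 set) clobbers
   only LR, and a plain "B label" changes nothing beyond the PC, which
   the common process_record path already saves for every insn.  */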
11540 static int
11541 arm_record_b_bl (insn_decode_record *arm_insn_r)
11542 {
11543 uint32_t record_buf[8];
11544
11545 /* Handle B, BL, BLX(1) insns. */
11546 /* B simply branches so we do nothing here. */
11547 /* Note: BLX(1) doesn't fall here but instead it falls into
11548 extension space. */
11549 if (bit (arm_insn_r->arm_insn, 24))
11550 {
11551 record_buf[0] = ARM_LR_REGNUM;
11552 arm_insn_r->reg_rec_count = 1;
11553 }
11554
11555 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11556
11557 return 0;
11558 }
11559
11560 static int
11561 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11562 {
11563 gdb_printf (gdb_stderr,
11564 _("Process record does not support instruction "
11565 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11566 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11567
11568 return -1;
11569 }
11570
11571 /* Record handler for vector data transfer instructions. */
11572
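/* These are the 8/16/32-bit transfers between an ARM core register and
   the VFP/Neon register bank.  For example, "VMRS r0, FPSCR" records the
   core destination r0, while the flag-transfer form
   "VMRS APSR_nzcv, FPSCR" (Rt == 15) records the CPSR instead, which is
   what the reg_t == 15 check below implements.  */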
11573 static int
11574 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11575 {
11576 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11577 uint32_t record_buf[4];
11578
11579 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11580 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11581 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11582 bit_l = bit (arm_insn_r->arm_insn, 20);
11583 bit_c = bit (arm_insn_r->arm_insn, 8);
11584
11585 /* Handle VMOV instruction. */
11586 if (bit_l && bit_c)
11587 {
11588 record_buf[0] = reg_t;
11589 arm_insn_r->reg_rec_count = 1;
11590 }
11591 else if (bit_l && !bit_c)
11592 {
11593 /* Handle VMOV instruction. */
11594 if (bits_a == 0x00)
11595 {
11596 record_buf[0] = reg_t;
11597 arm_insn_r->reg_rec_count = 1;
11598 }
11599 /* Handle VMRS instruction. */
11600 else if (bits_a == 0x07)
11601 {
11602 if (reg_t == 15)
11603 reg_t = ARM_PS_REGNUM;
11604
11605 record_buf[0] = reg_t;
11606 arm_insn_r->reg_rec_count = 1;
11607 }
11608 }
11609 else if (!bit_l && !bit_c)
11610 {
11611 /* Handle VMOV instruction. */
11612 if (bits_a == 0x00)
11613 {
11614 record_buf[0] = ARM_D0_REGNUM + reg_v;
11615
11616 arm_insn_r->reg_rec_count = 1;
11617 }
11618 /* Handle VMSR instruction. */
11619 else if (bits_a == 0x07)
11620 {
11621 record_buf[0] = ARM_FPSCR_REGNUM;
11622 arm_insn_r->reg_rec_count = 1;
11623 }
11624 }
11625 else if (!bit_l && bit_c)
11626 {
11627 /* Handle VMOV instruction. */
11628 if (!(bits_a & 0x04))
11629 {
11630 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11631 + ARM_D0_REGNUM;
11632 arm_insn_r->reg_rec_count = 1;
11633 }
11634 /* Handle VDUP instruction. */
11635 else
11636 {
11637 if (bit (arm_insn_r->arm_insn, 21))
11638 {
11639 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11640 record_buf[0] = reg_v + ARM_D0_REGNUM;
11641 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11642 arm_insn_r->reg_rec_count = 2;
11643 }
11644 else
11645 {
11646 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11647 record_buf[0] = reg_v + ARM_D0_REGNUM;
11648 arm_insn_r->reg_rec_count = 1;
11649 }
11650 }
11651 }
11652
11653 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11654 return 0;
11655 }
11656
11657 /* Record handler for extension register load/store instructions. */
11658
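/* Extension register loads and stores (VLDR/VSTR, VLDM/VSTM, VPUSH/VPOP)
   follow the same pattern as their core-register counterparts: stores
   record the memory they will overwrite, loads record the destination
   D registers, and writeback records the base register.  For example,
   "VSTR d0, [r1, #8]" records two 4-byte entries covering the eight
   bytes at r1 + 8.  */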
11659 static int
11660 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11661 {
11662 uint32_t opcode, single_reg;
11663 uint8_t op_vldm_vstm;
11664 uint32_t record_buf[8], record_buf_mem[128];
11665 ULONGEST u_regval = 0;
11666
11667 struct regcache *reg_cache = arm_insn_r->regcache;
11668
11669 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11670 single_reg = !bit (arm_insn_r->arm_insn, 8);
11671 op_vldm_vstm = opcode & 0x1b;
11672
11673 /* Handle VMOV instructions. */
11674 if ((opcode & 0x1e) == 0x04)
11675 {
11676 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11677 {
11678 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11679 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11680 arm_insn_r->reg_rec_count = 2;
11681 }
11682 else
11683 {
11684 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11685 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11686
11687 if (single_reg)
11688 {
11689 /* The first S register number m is REG_M:M (M is bit 5),
11690 the corresponding D register number is REG_M:M / 2, which
11691 is REG_M. */
11692 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11693 /* The second S register number is REG_M:M + 1, the
11694 corresponding D register number is (REG_M:M + 1) / 2.
11695 IOW, if bit M is 1, the first and second S registers
11696 are mapped to different D registers, otherwise, they are
11697 in the same D register. */
11698 if (bit_m)
11699 {
11700 record_buf[arm_insn_r->reg_rec_count++]
11701 = ARM_D0_REGNUM + reg_m + 1;
11702 }
11703 }
11704 else
11705 {
11706 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11707 arm_insn_r->reg_rec_count = 1;
11708 }
11709 }
11710 }
11711 /* Handle VSTM and VPUSH instructions. */
11712 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11713 || op_vldm_vstm == 0x12)
11714 {
11715 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11716 uint32_t memory_index = 0;
11717
11718 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11719 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11720 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11721 imm_off32 = imm_off8 << 2;
11722 memory_count = imm_off8;
11723
11724 if (bit (arm_insn_r->arm_insn, 23))
11725 start_address = u_regval;
11726 else
11727 start_address = u_regval - imm_off32;
11728
11729 if (bit (arm_insn_r->arm_insn, 21))
11730 {
11731 record_buf[0] = reg_rn;
11732 arm_insn_r->reg_rec_count = 1;
11733 }
11734
11735 while (memory_count > 0)
11736 {
11737 if (single_reg)
11738 {
11739 record_buf_mem[memory_index] = 4;
11740 record_buf_mem[memory_index + 1] = start_address;
11741 start_address = start_address + 4;
11742 memory_index = memory_index + 2;
11743 }
11744 else
11745 {
11746 record_buf_mem[memory_index] = 4;
11747 record_buf_mem[memory_index + 1] = start_address;
11748 record_buf_mem[memory_index + 2] = 4;
11749 record_buf_mem[memory_index + 3] = start_address + 4;
11750 start_address = start_address + 8;
11751 memory_index = memory_index + 4;
11752 }
11753 memory_count--;
11754 }
11755 arm_insn_r->mem_rec_count = (memory_index >> 1);
11756 }
11757 /* Handle VLDM instructions. */
11758 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11759 || op_vldm_vstm == 0x13)
11760 {
11761 uint32_t reg_count, reg_vd;
11762 uint32_t reg_index = 0;
11763 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11764
11765 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11766 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11767
11768 /* REG_VD is the first D register number. If the instruction
11769 loads memory to S registers (SINGLE_REG is TRUE), the register
11770 number is (REG_VD << 1 | bit D), so the corresponding D
11771 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11772 if (!single_reg)
11773 reg_vd = reg_vd | (bit_d << 4);
11774
11775 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11776 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11777
11778 /* If the instruction loads memory to D register, REG_COUNT should
11779 be divided by 2, according to the ARM Architecture Reference
11780 Manual. If the instruction loads memory to S register, divide by
11781 2 as well because two S registers are mapped to D register. */
11782 reg_count = reg_count / 2;
11783 if (single_reg && bit_d)
11784 {
11785 /* Increase the register count if S register list starts from
11786 an odd number (bit d is one). */
11787 reg_count++;
11788 }
11789
11790 while (reg_count > 0)
11791 {
11792 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11793 reg_count--;
11794 }
11795 arm_insn_r->reg_rec_count = reg_index;
11796 }
11797 /* VSTR Vector store register. */
11798 else if ((opcode & 0x13) == 0x10)
11799 {
11800 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11801 uint32_t memory_index = 0;
11802
11803 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11804 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11805 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11806 imm_off32 = imm_off8 << 2;
11807
11808 if (bit (arm_insn_r->arm_insn, 23))
11809 start_address = u_regval + imm_off32;
11810 else
11811 start_address = u_regval - imm_off32;
11812
11813 if (single_reg)
11814 {
11815 record_buf_mem[memory_index] = 4;
11816 record_buf_mem[memory_index + 1] = start_address;
11817 arm_insn_r->mem_rec_count = 1;
11818 }
11819 else
11820 {
11821 record_buf_mem[memory_index] = 4;
11822 record_buf_mem[memory_index + 1] = start_address;
11823 record_buf_mem[memory_index + 2] = 4;
11824 record_buf_mem[memory_index + 3] = start_address + 4;
11825 arm_insn_r->mem_rec_count = 2;
11826 }
11827 }
11828 /* VLDR Vector load register. */
11829 else if ((opcode & 0x13) == 0x11)
11830 {
11831 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11832
11833 if (!single_reg)
11834 {
11835 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11836 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11837 }
11838 else
11839 {
11840 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11841 /* Record register D rather than pseudo register S. */
11842 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11843 }
11844 arm_insn_r->reg_rec_count = 1;
11845 }
11846
11847 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11848 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11849 return 0;
11850 }
11851
11852 /* Record handler for arm/thumb mode VFP data processing instructions. */
11853
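/* VFP data-processing insns modify only VFP state, so recording them
   means saving the destination register and, for compares, the FPSCR.
   For example, "VADD.F64 d0, d1, d2" records d0 through the INSN_T1
   case below, while "VCMP.F64 d0, d1" records the FPSCR through
   INSN_T3.  */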
11854 static int
11855 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11856 {
11857 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11858 uint32_t record_buf[4];
11859 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11860 enum insn_types curr_insn_type = INSN_INV;
11861
11862 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11863 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11864 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11865 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11866 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11867 bit_d = bit (arm_insn_r->arm_insn, 22);
11868 /* Mask off the "D" bit. */
11869 opc1 = opc1 & ~0x04;
11870
11871 /* Handle VMLA, VMLS. */
11872 if (opc1 == 0x00)
11873 {
11874 if (bit (arm_insn_r->arm_insn, 10))
11875 {
11876 if (bit (arm_insn_r->arm_insn, 6))
11877 curr_insn_type = INSN_T0;
11878 else
11879 curr_insn_type = INSN_T1;
11880 }
11881 else
11882 {
11883 if (dp_op_sz)
11884 curr_insn_type = INSN_T1;
11885 else
11886 curr_insn_type = INSN_T2;
11887 }
11888 }
11889 /* Handle VNMLA, VNMLS, VNMUL. */
11890 else if (opc1 == 0x01)
11891 {
11892 if (dp_op_sz)
11893 curr_insn_type = INSN_T1;
11894 else
11895 curr_insn_type = INSN_T2;
11896 }
11897 /* Handle VMUL. */
11898 else if (opc1 == 0x02 && !(opc3 & 0x01))
11899 {
11900 if (bit (arm_insn_r->arm_insn, 10))
11901 {
11902 if (bit (arm_insn_r->arm_insn, 6))
11903 curr_insn_type = INSN_T0;
11904 else
11905 curr_insn_type = INSN_T1;
11906 }
11907 else
11908 {
11909 if (dp_op_sz)
11910 curr_insn_type = INSN_T1;
11911 else
11912 curr_insn_type = INSN_T2;
11913 }
11914 }
11915 /* Handle VADD, VSUB. */
11916 else if (opc1 == 0x03)
11917 {
11918 if (!bit (arm_insn_r->arm_insn, 9))
11919 {
11920 if (bit (arm_insn_r->arm_insn, 6))
11921 curr_insn_type = INSN_T0;
11922 else
11923 curr_insn_type = INSN_T1;
11924 }
11925 else
11926 {
11927 if (dp_op_sz)
11928 curr_insn_type = INSN_T1;
11929 else
11930 curr_insn_type = INSN_T2;
11931 }
11932 }
11933 /* Handle VDIV. */
11934 else if (opc1 == 0x08)
11935 {
11936 if (dp_op_sz)
11937 curr_insn_type = INSN_T1;
11938 else
11939 curr_insn_type = INSN_T2;
11940 }
11941 /* Handle all other vfp data processing instructions. */
11942 else if (opc1 == 0x0b)
11943 {
11944 /* Handle VMOV. */
11945 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11946 {
11947 if (bit (arm_insn_r->arm_insn, 4))
11948 {
11949 if (bit (arm_insn_r->arm_insn, 6))
11950 curr_insn_type = INSN_T0;
11951 else
11952 curr_insn_type = INSN_T1;
11953 }
11954 else
11955 {
11956 if (dp_op_sz)
11957 curr_insn_type = INSN_T1;
11958 else
11959 curr_insn_type = INSN_T2;
11960 }
11961 }
11962 /* Handle VNEG and VABS. */
11963 else if ((opc2 == 0x01 && opc3 == 0x01)
11964 || (opc2 == 0x00 && opc3 == 0x03))
11965 {
11966 if (!bit (arm_insn_r->arm_insn, 11))
11967 {
11968 if (bit (arm_insn_r->arm_insn, 6))
11969 curr_insn_type = INSN_T0;
11970 else
11971 curr_insn_type = INSN_T1;
11972 }
11973 else
11974 {
11975 if (dp_op_sz)
11976 curr_insn_type = INSN_T1;
11977 else
11978 curr_insn_type = INSN_T2;
11979 }
11980 }
11981 /* Handle VSQRT. */
11982 else if (opc2 == 0x01 && opc3 == 0x03)
11983 {
11984 if (dp_op_sz)
11985 curr_insn_type = INSN_T1;
11986 else
11987 curr_insn_type = INSN_T2;
11988 }
11989 /* Handle VCVT. */
11990 else if (opc2 == 0x07 && opc3 == 0x03)
11991 {
11992 if (!dp_op_sz)
11993 curr_insn_type = INSN_T1;
11994 else
11995 curr_insn_type = INSN_T2;
11996 }
11997 else if (opc3 & 0x01)
11998 {
11999 /* Handle VCVT. */
12000 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12001 {
12002 if (!bit (arm_insn_r->arm_insn, 18))
12003 curr_insn_type = INSN_T2;
12004 else
12005 {
12006 if (dp_op_sz)
12007 curr_insn_type = INSN_T1;
12008 else
12009 curr_insn_type = INSN_T2;
12010 }
12011 }
12012 /* Handle VCVT. */
12013 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12014 {
12015 if (dp_op_sz)
12016 curr_insn_type = INSN_T1;
12017 else
12018 curr_insn_type = INSN_T2;
12019 }
12020 /* Handle VCVTB, VCVTT. */
12021 else if ((opc2 & 0x0e) == 0x02)
12022 curr_insn_type = INSN_T2;
12023 /* Handle VCMP, VCMPE. */
12024 else if ((opc2 & 0x0e) == 0x04)
12025 curr_insn_type = INSN_T3;
12026 }
12027 }
12028
12029 switch (curr_insn_type)
12030 {
12031 case INSN_T0:
12032 reg_vd = reg_vd | (bit_d << 4);
12033 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12034 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12035 arm_insn_r->reg_rec_count = 2;
12036 break;
12037
12038 case INSN_T1:
12039 reg_vd = reg_vd | (bit_d << 4);
12040 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12041 arm_insn_r->reg_rec_count = 1;
12042 break;
12043
12044 case INSN_T2:
12045 reg_vd = (reg_vd << 1) | bit_d;
12046 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12047 arm_insn_r->reg_rec_count = 1;
12048 break;
12049
12050 case INSN_T3:
12051 record_buf[0] = ARM_FPSCR_REGNUM;
12052 arm_insn_r->reg_rec_count = 1;
12053 break;
12054
12055 default:
12056 gdb_assert_not_reached ("no decoding pattern found");
12057 break;
12058 }
12059
12060 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12061 return 0;
12062 }
12063
12064 /* Handling opcode 110 insns. */
12065
12066 static int
12067 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12068 {
12069 uint32_t op1, op1_ebit, coproc;
12070
12071 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12072 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12073 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12074
12075 if ((coproc & 0x0e) == 0x0a)
12076 {
12077 /* Handle extension register ld/st instructions. */
12078 if (!(op1 & 0x20))
12079 return arm_record_exreg_ld_st_insn (arm_insn_r);
12080
12081 /* 64-bit transfers between arm core and extension registers. */
12082 if ((op1 & 0x3e) == 0x04)
12083 return arm_record_exreg_ld_st_insn (arm_insn_r);
12084 }
12085 else
12086 {
12087 /* Handle coprocessor ld/st instructions. */
12088 if (!(op1 & 0x3a))
12089 {
12090 /* Store. */
12091 if (!op1_ebit)
12092 return arm_record_unsupported_insn (arm_insn_r);
12093 else
12094 /* Load. */
12095 return arm_record_unsupported_insn (arm_insn_r);
12096 }
12097
12098 /* Move to coprocessor from two arm core registers. */
12099 if (op1 == 0x4)
12100 return arm_record_unsupported_insn (arm_insn_r);
12101
12102 /* Move to two arm core registers from coprocessor. */
12103 if (op1 == 0x5)
12104 {
12105 uint32_t reg_t[2];
12106
12107 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12108 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12109 arm_insn_r->reg_rec_count = 2;
12110
12111 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12112 return 0;
12113 }
12114 }
12115 return arm_record_unsupported_insn (arm_insn_r);
12116 }
12117
12118 /* Handling opcode 111 insns. */
12119
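/* This group also covers SWI/SVC.  Recording a system call is delegated
   to the OS-specific arm_syscall_record hook; the syscall number comes
   from the insn's 24-bit immediate for OABI (encoded as 0x900000 plus
   the number) and from r7 for EABI, where the immediate is zero.  */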
12120 static int
12121 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12122 {
12123 uint32_t op, op1_ebit, coproc, bits_24_25;
12124 arm_gdbarch_tdep *tdep
12125 = (arm_gdbarch_tdep *) gdbarch_tdep (arm_insn_r->gdbarch);
12126 struct regcache *reg_cache = arm_insn_r->regcache;
12127
12128 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12129 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12130 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12131 op = bit (arm_insn_r->arm_insn, 4);
12132 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
12133
12134 /* Handle arm SWI/SVC system call instructions. */
12135 if (bits_24_25 == 0x3)
12136 {
12137 if (tdep->arm_syscall_record != NULL)
12138 {
12139 ULONGEST svc_operand, svc_number;
12140
12141 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12142
12143 if (svc_operand) /* OABI. */
12144 svc_number = svc_operand - 0x900000;
12145 else /* EABI. */
12146 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12147
12148 return tdep->arm_syscall_record (reg_cache, svc_number);
12149 }
12150 else
12151 {
12152 gdb_printf (gdb_stderr, _("no syscall record support\n"));
12153 return -1;
12154 }
12155 }
12156 else if (bits_24_25 == 0x02)
12157 {
12158 if (op)
12159 {
12160 if ((coproc & 0x0e) == 0x0a)
12161 {
12162 /* 8, 16, and 32-bit transfer */
12163 return arm_record_vdata_transfer_insn (arm_insn_r);
12164 }
12165 else
12166 {
12167 if (op1_ebit)
12168 {
12169 /* MRC, MRC2 */
12170 uint32_t record_buf[1];
12171
12172 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12173 if (record_buf[0] == 15)
12174 record_buf[0] = ARM_PS_REGNUM;
12175
12176 arm_insn_r->reg_rec_count = 1;
12177 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12178 record_buf);
12179 return 0;
12180 }
12181 else
12182 {
12183 /* MCR, MCR2 */
12184 return -1;
12185 }
12186 }
12187 }
12188 else
12189 {
12190 if ((coproc & 0x0e) == 0x0a)
12191 {
12192 /* VFP data-processing instructions. */
12193 return arm_record_vfp_data_proc_insn (arm_insn_r);
12194 }
12195 else
12196 {
12197 /* CDP, CDP2 */
12198 return -1;
12199 }
12200 }
12201 }
12202 else
12203 {
12204 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
12205
12206 if (op1 == 5)
12207 {
12208 if ((coproc & 0x0e) != 0x0a)
12209 {
12210 /* MRRC, MRRC2 */
12211 return -1;
12212 }
12213 }
12214 else if (op1 == 4 || op1 == 5)
12215 {
12216 if ((coproc & 0x0e) == 0x0a)
12217 {
12218 /* 64-bit transfers between ARM core and extension registers. */
12219 return -1;
12220 }
12221 else if (op1 == 4)
12222 {
12223 /* MCRR, MCRR2 */
12224 return -1;
12225 }
12226 }
12227 else if (op1 == 0 || op1 == 1)
12228 {
12229 /* UNDEFINED */
12230 return -1;
12231 }
12232 else
12233 {
12234 if ((coproc & 0x0e) == 0x0a)
12235 {
12236 /* Extension register load/store */
12237 }
12238 else
12239 {
12240 /* STC, STC2, LDC, LDC2 */
12241 }
12242 return -1;
12243 }
12244 }
12245
12246 return -1;
12247 }
12248
12249 /* Handling opcode 000 insns. */
12250
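/* The 16-bit Thumb shift/add/subtract encodings write a low destination
   register held in bits 0-2 and normally set the flags, so for an insn
   such as "LSLS r0, r1, #2" recording r0 and the CPSR is sufficient.  */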
12251 static int
12252 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12253 {
12254 uint32_t record_buf[8];
12255 uint32_t reg_src1 = 0;
12256
12257 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12258
12259 record_buf[0] = ARM_PS_REGNUM;
12260 record_buf[1] = reg_src1;
12261 thumb_insn_r->reg_rec_count = 2;
12262
12263 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12264
12265 return 0;
12266 }
12267
12268
12269 /* Handling opcode 001 insns. */
12270
12271 static int
12272 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12273 {
12274 uint32_t record_buf[8];
12275 uint32_t reg_src1 = 0;
12276
12277 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12278
12279 record_buf[0] = ARM_PS_REGNUM;
12280 record_buf[1] = reg_src1;
12281 thumb_insn_r->reg_rec_count = 2;
12282
12283 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12284
12285 return 0;
12286 }
12287
12288 /* Handling opcode 010 insns. */
12289
12290 static int
12291 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12292 {
12293 struct regcache *reg_cache = thumb_insn_r->regcache;
12294 uint32_t record_buf[8], record_buf_mem[8];
12295
12296 uint32_t reg_src1 = 0, reg_src2 = 0;
12297 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12298
12299 ULONGEST u_regval[2] = {0};
12300
12301 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12302
12303 if (bit (thumb_insn_r->arm_insn, 12))
12304 {
12305 /* Handle load/store register offset. */
12306 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
12307
12308 if (in_inclusive_range (opB, 4U, 7U))
12309 {
12310 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
12311 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12312 record_buf[0] = reg_src1;
12313 thumb_insn_r->reg_rec_count = 1;
12314 }
12315 else if (in_inclusive_range (opB, 0U, 2U))
12316 {
12317 /* STR(2), STRB(2), STRH(2). */
12318 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12319 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12320 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12321 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12322 if (0 == opB)
12323 record_buf_mem[0] = 4; /* STR (2). */
12324 else if (2 == opB)
12325 record_buf_mem[0] = 1; /* STRB (2). */
12326 else if (1 == opB)
12327 record_buf_mem[0] = 2; /* STRH (2). */
12328 record_buf_mem[1] = u_regval[0] + u_regval[1];
12329 thumb_insn_r->mem_rec_count = 1;
12330 }
12331 }
12332 else if (bit (thumb_insn_r->arm_insn, 11))
12333 {
12334 /* Handle load from literal pool. */
12335 /* LDR(3). */
12336 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12337 record_buf[0] = reg_src1;
12338 thumb_insn_r->reg_rec_count = 1;
12339 }
12340 else if (opcode1)
12341 {
12342 /* Special data instructions and branch and exchange */
12343 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12344 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12345 if ((3 == opcode2) && (!opcode3))
12346 {
12347 /* Branch with exchange. */
12348 record_buf[0] = ARM_PS_REGNUM;
12349 thumb_insn_r->reg_rec_count = 1;
12350 }
12351 else
12352 {
12353 /* Format 8; special data processing insns. */
12354 record_buf[0] = ARM_PS_REGNUM;
12355 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
12356 | bits (thumb_insn_r->arm_insn, 0, 2));
12357 thumb_insn_r->reg_rec_count = 2;
12358 }
12359 }
12360 else
12361 {
12362 /* Format 5; data processing insns. */
12363 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12364 if (bit (thumb_insn_r->arm_insn, 7))
12365 {
12366 reg_src1 = reg_src1 + 8;
12367 }
12368 record_buf[0] = ARM_PS_REGNUM;
12369 record_buf[1] = reg_src1;
12370 thumb_insn_r->reg_rec_count = 2;
12371 }
12372
12373 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12374 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12375 record_buf_mem);
12376
12377 return 0;
12378 }
12379
12380 /* Handling opcode 001 insns. */
12381
12382 static int
12383 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12384 {
12385 struct regcache *reg_cache = thumb_insn_r->regcache;
12386 uint32_t record_buf[8], record_buf_mem[8];
12387
12388 uint32_t reg_src1 = 0;
12389 uint32_t opcode = 0, immed_5 = 0;
12390
12391 ULONGEST u_regval = 0;
12392
12393 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12394
12395 if (opcode)
12396 {
12397 /* LDR(1). */
12398 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12399 record_buf[0] = reg_src1;
12400 thumb_insn_r->reg_rec_count = 1;
12401 }
12402 else
12403 {
12404 /* STR(1). */
12405 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12406 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12407 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12408 record_buf_mem[0] = 4;
12409 record_buf_mem[1] = u_regval + (immed_5 * 4);
12410 thumb_insn_r->mem_rec_count = 1;
12411 }
12412
12413 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12414 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12415 record_buf_mem);
12416
12417 return 0;
12418 }
12419
12420 /* Handling opcode 100 insns. */
12421
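/* This opcode group pairs SP-relative word accesses with halfword
   accesses.  For example, "STR r1, [sp, #16]" records four bytes at
   SP + 16 and "STRH r1, [r2, #6]" records two bytes at r2 + 6, while
   the corresponding loads record only their destination register.  */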
12422 static int
12423 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12424 {
12425 struct regcache *reg_cache = thumb_insn_r->regcache;
12426 uint32_t record_buf[8], record_buf_mem[8];
12427
12428 uint32_t reg_src1 = 0;
12429 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12430
12431 ULONGEST u_regval = 0;
12432
12433 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12434
12435 if (3 == opcode)
12436 {
12437 /* LDR(4). */
12438 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12439 record_buf[0] = reg_src1;
12440 thumb_insn_r->reg_rec_count = 1;
12441 }
12442 else if (1 == opcode)
12443 {
12444 /* LDRH(1). */
12445 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12446 record_buf[0] = reg_src1;
12447 thumb_insn_r->reg_rec_count = 1;
12448 }
12449 else if (2 == opcode)
12450 {
12451 /* STR(3). */
12452 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12453 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12454 record_buf_mem[0] = 4;
12455 record_buf_mem[1] = u_regval + (immed_8 * 4);
12456 thumb_insn_r->mem_rec_count = 1;
12457 }
12458 else if (0 == opcode)
12459 {
12460 /* STRH(1). */
12461 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12462 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12463 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12464 record_buf_mem[0] = 2;
12465 record_buf_mem[1] = u_regval + (immed_5 * 2);
12466 thumb_insn_r->mem_rec_count = 1;
12467 }
12468
12469 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12470 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12471 record_buf_mem);
12472
12473 return 0;
12474 }
12475
12476 /* Handling opcode 101 insns. */
12477
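/* Worked example for the miscellaneous 16-bit group below:
   "PUSH {r4, r5}" stores two words just below the stack pointer, so
   eight bytes starting at SP - 8 are recorded as modified memory and SP
   itself is recorded for the implicit writeback.  */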
12478 static int
12479 thumb_record_misc (insn_decode_record *thumb_insn_r)
12480 {
12481 struct regcache *reg_cache = thumb_insn_r->regcache;
12482
12483 uint32_t opcode = 0;
12484 uint32_t register_bits = 0, register_count = 0;
12485 uint32_t index = 0, start_address = 0;
12486 uint32_t record_buf[24], record_buf_mem[48];
12487 uint32_t reg_src1;
12488
12489 ULONGEST u_regval = 0;
12490
12491 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12492
12493 if (opcode == 0 || opcode == 1)
12494 {
12495 /* ADR and ADD (SP plus immediate) */
12496
12497 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12498 record_buf[0] = reg_src1;
12499 thumb_insn_r->reg_rec_count = 1;
12500 }
12501 else
12502 {
12503 /* Miscellaneous 16-bit instructions */
12504 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12505
12506 switch (opcode2)
12507 {
12508 case 6:
12509 /* SETEND and CPS */
12510 break;
12511 case 0:
12512 /* ADD/SUB (SP plus immediate) */
12513 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12514 record_buf[0] = ARM_SP_REGNUM;
12515 thumb_insn_r->reg_rec_count = 1;
12516 break;
12517 case 1: /* fall through */
12518 case 3: /* fall through */
12519 case 9: /* fall through */
12520 case 11:
12521 /* CBNZ, CBZ */
12522 break;
12523 case 2:
12524 /* SXTH, SXTB, UXTH, UXTB */
12525 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12526 thumb_insn_r->reg_rec_count = 1;
12527 break;
12528 case 4: /* fall through */
12529 case 5:
12530 /* PUSH. */
12531 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12532 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12533 while (register_bits)
12534 {
12535 if (register_bits & 0x00000001)
12536 register_count++;
12537 register_bits = register_bits >> 1;
12538 }
12539 start_address = u_regval - \
12540 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12541 thumb_insn_r->mem_rec_count = register_count;
12542 while (register_count)
12543 {
12544 record_buf_mem[(register_count * 2) - 1] = start_address;
12545 record_buf_mem[(register_count * 2) - 2] = 4;
12546 start_address = start_address + 4;
12547 register_count--;
12548 }
12549 record_buf[0] = ARM_SP_REGNUM;
12550 thumb_insn_r->reg_rec_count = 1;
12551 break;
12552 case 10:
12553 /* REV, REV16, REVSH */
12554 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12555 thumb_insn_r->reg_rec_count = 1;
12556 break;
12557 case 12: /* fall through */
12558 case 13:
12559 /* POP. */
12560 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12561 while (register_bits)
12562 {
12563 if (register_bits & 0x00000001)
12564 record_buf[index++] = register_count;
12565 register_bits = register_bits >> 1;
12566 register_count++;
12567 }
12568 record_buf[index++] = ARM_PS_REGNUM;
12569 record_buf[index++] = ARM_SP_REGNUM;
12570 thumb_insn_r->reg_rec_count = index;
12571 break;
12572 case 0xe:
12573 /* BKPT insn. */
12574 /* Handle enhanced software breakpoint insn, BKPT. */
12575 /* The CPSR is changed so that execution continues in ARM state with
12576 normal interrupts disabled, entering abort mode. */
12577 /* The PC is set according to the high vector configuration. */
12578 /* If the user hits the breakpoint and then reverses execution, we need
12579 to go back with the previous CPSR and Program Counter. */
12580 record_buf[0] = ARM_PS_REGNUM;
12581 record_buf[1] = ARM_LR_REGNUM;
12582 thumb_insn_r->reg_rec_count = 2;
12583 /* We need to save SPSR value, which is not yet done. */
12584 gdb_printf (gdb_stderr,
12585 _("Process record does not support instruction "
12586 "0x%0x at address %s.\n"),
12587 thumb_insn_r->arm_insn,
12588 paddress (thumb_insn_r->gdbarch,
12589 thumb_insn_r->this_addr));
12590 return -1;
12591
12592 case 0xf:
12593 /* If-Then, and hints */
12594 break;
12595 default:
12596 return -1;
12597 };
12598 }
12599
12600 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12601 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12602 record_buf_mem);
12603
12604 return 0;
12605 }
12606
12607 /* Handling opcode 110 insns. */
12608
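/* This group covers the Thumb block transfers and SVC.  For example,
   "STMIA r0!, {r1, r2}" records two 4-byte entries starting at the
   address held in r0, while "LDMIA r0!, {r1, r2}" records the loaded
   registers plus the base register r0 for the writeback.  An SVC is
   handed to the OS-specific arm_syscall_record hook with the syscall
   number read from r7.  */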
12609 static int
12610 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12611 {
12612 arm_gdbarch_tdep *tdep
12613 = (arm_gdbarch_tdep *) gdbarch_tdep (thumb_insn_r->gdbarch);
12614 struct regcache *reg_cache = thumb_insn_r->regcache;
12615
12616 uint32_t ret = 0; /* Function return value: -1 for record failure, 0 for success. */
12617 uint32_t reg_src1 = 0;
12618 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12619 uint32_t index = 0, start_address = 0;
12620 uint32_t record_buf[24], record_buf_mem[48];
12621
12622 ULONGEST u_regval = 0;
12623
12624 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12625 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12626
12627 if (1 == opcode2)
12628 {
12629
12630 /* LDMIA. */
12631 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12632 /* Get Rn. */
12633 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12634 while (register_bits)
12635 {
12636 if (register_bits & 0x00000001)
12637 record_buf[index++] = register_count;
12638 register_bits = register_bits >> 1;
12639 register_count++;
12640 }
12641 record_buf[index++] = reg_src1;
12642 thumb_insn_r->reg_rec_count = index;
12643 }
12644 else if (0 == opcode2)
12645 {
12646 /* Handle STMIA. */
12647 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12648 /* Get Rn. */
12649 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12650 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12651 while (register_bits)
12652 {
12653 if (register_bits & 0x00000001)
12654 register_count++;
12655 register_bits = register_bits >> 1;
12656 }
12657 start_address = u_regval;
12658 thumb_insn_r->mem_rec_count = register_count;
12659 while (register_count)
12660 {
12661 record_buf_mem[(register_count * 2) - 1] = start_address;
12662 record_buf_mem[(register_count * 2) - 2] = 4;
12663 start_address = start_address + 4;
12664 register_count--;
12665 }
12666 }
12667 else if (0x1F == opcode1)
12668 {
12669 /* Handle arm syscall insn. */
12670 if (tdep->arm_syscall_record != NULL)
12671 {
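/* The syscall number is expected in r7, per the ARM EABI Linux
   convention assumed here. */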
12672 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12673 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12674 }
12675 else
12676 {
12677 gdb_printf (gdb_stderr, _("no syscall record support\n"));
12678 return -1;
12679 }
12680 }
12681
12682 /* B (1), the conditional branch, is automatically taken care of in
12683 process_record, as the PC is saved there. */
12684
12685 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12686 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12687 record_buf_mem);
12688
12689 return ret;
12690 }
12691
12692 /* Handling opcode 111 insns. */
12693
12694 static int
12695 thumb_record_branch (insn_decode_record *thumb_insn_r)
12696 {
12697 uint32_t record_buf[8];
12698 uint32_t bits_h = 0;
12699
12700 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
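/* Bits 11-12 distinguish the pieces of the two-halfword Thumb BL/BLX
   encoding: 2 and 3 are the BL halves (LR is written), 1 is the BLX
   suffix (PC, LR and the T bit change), 0 is an unconditional B. */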
12701
12702 if (2 == bits_h || 3 == bits_h)
12703 {
12704 /* BL */
12705 record_buf[0] = ARM_LR_REGNUM;
12706 thumb_insn_r->reg_rec_count = 1;
12707 }
12708 else if (1 == bits_h)
12709 {
12710 /* BLX(1). */
12711 record_buf[0] = ARM_PS_REGNUM;
12712 record_buf[1] = ARM_LR_REGNUM;
12713 thumb_insn_r->reg_rec_count = 2;
12714 }
12715
12716 /* B (2) is automatically taken care of in process_record, as the PC is
12717 saved there. */
12718
12719 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12720
12721 return 0;
12722 }
12723
12724 /* Handler for thumb2 load/store multiple instructions. */
12725
12726 static int
12727 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12728 {
12729 struct regcache *reg_cache = thumb2_insn_r->regcache;
12730
12731 uint32_t reg_rn, op;
12732 uint32_t register_bits = 0, register_count = 0;
12733 uint32_t index = 0, start_address = 0;
12734 uint32_t record_buf[24], record_buf_mem[48];
12735
12736 ULONGEST u_regval = 0;
12737
12738 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12739 op = bits (thumb2_insn_r->arm_insn, 23, 24);
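/* Bits 23-24 select the variant: 0 and 3 are SRS/RFE, 1 is the
   increment-after form (LDMIA/STMIA), 2 is the decrement-before form
   (LDMDB/STMDB). */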
12740
12741 if (0 == op || 3 == op)
12742 {
12743 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12744 {
12745 /* Handle RFE instruction. */
12746 record_buf[0] = ARM_PS_REGNUM;
12747 thumb2_insn_r->reg_rec_count = 1;
12748 }
12749 else
12750 {
12751 /* Handle SRS instruction after reading banked SP. */
12752 return arm_record_unsupported_insn (thumb2_insn_r);
12753 }
12754 }
12755 else if (1 == op || 2 == op)
12756 {
12757 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12758 {
12759 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12760 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12761 while (register_bits)
12762 {
12763 if (register_bits & 0x00000001)
12764 record_buf[index++] = register_count;
12765
12766 register_count++;
12767 register_bits = register_bits >> 1;
12768 }
12769 record_buf[index++] = reg_rn;
12770 record_buf[index++] = ARM_PS_REGNUM;
12771 thumb2_insn_r->reg_rec_count = index;
12772 }
12773 else
12774 {
12775 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12776 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12777 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12778 while (register_bits)
12779 {
12780 if (register_bits & 0x00000001)
12781 register_count++;
12782
12783 register_bits = register_bits >> 1;
12784 }
12785
12786 if (1 == op)
12787 {
12788 /* Start address calculation for STM/STMIA/STMEA. */
12789 start_address = u_regval;
12790 }
12791 else if (2 == op)
12792 {
12793 /* Start address calculation for STMDB/STMFD. */
12794 start_address = u_regval - register_count * 4;
12795 }
12796
12797 thumb2_insn_r->mem_rec_count = register_count;
12798 while (register_count)
12799 {
12800 record_buf_mem[register_count * 2 - 1] = start_address;
12801 record_buf_mem[register_count * 2 - 2] = 4;
12802 start_address = start_address + 4;
12803 register_count--;
12804 }
12805 record_buf[0] = reg_rn;
12806 record_buf[1] = ARM_PS_REGNUM;
12807 thumb2_insn_r->reg_rec_count = 2;
12808 }
12809 }
12810
12811 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12812 record_buf_mem);
12813 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12814 record_buf);
12815 return ARM_RECORD_SUCCESS;
12816 }
12817
12818 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12819 instructions. */
12820
12821 static int
12822 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12823 {
12824 struct regcache *reg_cache = thumb2_insn_r->regcache;
12825
12826 uint32_t reg_rd, reg_rn, offset_imm;
12827 uint32_t reg_dest1, reg_dest2;
12828 uint32_t address, offset_addr;
12829 uint32_t record_buf[8], record_buf_mem[8];
12830 uint32_t op1, op2, op3;
12831
12832 ULONGEST u_regval[2];
12833
12834 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12835 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12836 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12837
12838 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12839 {
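/* TBB/TBH (op1 == 1, op2 == 1, op3 0 or 1) only change the PC, which
   process_record saves unconditionally, so no destination register is
   recorded for them. */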
12840 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12841 {
12842 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12843 record_buf[0] = reg_dest1;
12844 record_buf[1] = ARM_PS_REGNUM;
12845 thumb2_insn_r->reg_rec_count = 2;
12846 }
12847
12848 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12849 {
12850 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12851 record_buf[2] = reg_dest2;
12852 thumb2_insn_r->reg_rec_count = 3;
12853 }
12854 }
12855 else
12856 {
12857 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12858 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12859
12860 if (0 == op1 && 0 == op2)
12861 {
12862 /* Handle STREX. */
12863 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12864 address = u_regval[0] + (offset_imm * 4);
12865 record_buf_mem[0] = 4;
12866 record_buf_mem[1] = address;
12867 thumb2_insn_r->mem_rec_count = 1;
12868 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12869 record_buf[0] = reg_rd;
12870 thumb2_insn_r->reg_rec_count = 1;
12871 }
12872 else if (1 == op1 && 0 == op2)
12873 {
12874 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12875 record_buf[0] = reg_rd;
12876 thumb2_insn_r->reg_rec_count = 1;
12877 address = u_regval[0];
12878 record_buf_mem[1] = address;
12879
12880 if (4 == op3)
12881 {
12882 /* Handle STREXB. */
12883 record_buf_mem[0] = 1;
12884 thumb2_insn_r->mem_rec_count = 1;
12885 }
12886 else if (5 == op3)
12887 {
12888 /* Handle STREXH. */
12889 record_buf_mem[0] = 2;
12890 thumb2_insn_r->mem_rec_count = 1;
12891 }
12892 else if (7 == op3)
12893 {
12894 /* Handle STREXD. */
12895 address = u_regval[0];
12896 record_buf_mem[0] = 4;
12897 record_buf_mem[2] = 4;
12898 record_buf_mem[3] = address + 4;
12899 thumb2_insn_r->mem_rec_count = 2;
12900 }
12901 }
12902 else
12903 {
12904 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12905
12906 if (bit (thumb2_insn_r->arm_insn, 24))
12907 {
12908 if (bit (thumb2_insn_r->arm_insn, 23))
12909 offset_addr = u_regval[0] + (offset_imm * 4);
12910 else
12911 offset_addr = u_regval[0] - (offset_imm * 4);
12912
12913 address = offset_addr;
12914 }
12915 else
12916 address = u_regval[0];
12917
12918 record_buf_mem[0] = 4;
12919 record_buf_mem[1] = address;
12920 record_buf_mem[2] = 4;
12921 record_buf_mem[3] = address + 4;
12922 thumb2_insn_r->mem_rec_count = 2;
12923 record_buf[0] = reg_rn;
12924 thumb2_insn_r->reg_rec_count = 1;
12925 }
12926 }
12927
12928 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12929 record_buf);
12930 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12931 record_buf_mem);
12932 return ARM_RECORD_SUCCESS;
12933 }
12934
12935 /* Handler for thumb2 data processing (shift register and modified immediate)
12936 instructions. */
12937
12938 static int
12939 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12940 {
12941 uint32_t reg_rd, op;
12942 uint32_t record_buf[8];
12943
12944 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12945 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12946
12947 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12948 {
12949 record_buf[0] = ARM_PS_REGNUM;
12950 thumb2_insn_r->reg_rec_count = 1;
12951 }
12952 else
12953 {
12954 record_buf[0] = reg_rd;
12955 record_buf[1] = ARM_PS_REGNUM;
12956 thumb2_insn_r->reg_rec_count = 2;
12957 }
12958
12959 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12960 record_buf);
12961 return ARM_RECORD_SUCCESS;
12962 }
12963
12964 /* Generic handler for thumb2 instructions which affect the destination and
12965 PS registers. */
12966
12967 static int
12968 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12969 {
12970 uint32_t reg_rd;
12971 uint32_t record_buf[8];
12972
12973 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12974
12975 record_buf[0] = reg_rd;
12976 record_buf[1] = ARM_PS_REGNUM;
12977 thumb2_insn_r->reg_rec_count = 2;
12978
12979 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12980 record_buf);
12981 return ARM_RECORD_SUCCESS;
12982 }
12983
12984 /* Handler for thumb2 branch and miscellaneous control instructions. */
12985
12986 static int
12987 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12988 {
12989 uint32_t op, op1, op2;
12990 uint32_t record_buf[8];
12991
12992 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12993 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12994 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12995
12996 /* Handle MSR insn. */
12997 if (!(op1 & 0x2) && 0x38 == op)
12998 {
12999 if (!(op2 & 0x3))
13000 {
13001 /* CPSR is going to be changed. */
13002 record_buf[0] = ARM_PS_REGNUM;
13003 thumb2_insn_r->reg_rec_count = 1;
13004 }
13005 else
13006 {
13007 arm_record_unsupported_insn (thumb2_insn_r);
13008 return -1;
13009 }
13010 }
13011 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13012 {
13013 /* BLX. */
13014 record_buf[0] = ARM_PS_REGNUM;
13015 record_buf[1] = ARM_LR_REGNUM;
13016 thumb2_insn_r->reg_rec_count = 2;
13017 }
13018
13019 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13020 record_buf);
13021 return ARM_RECORD_SUCCESS;
13022 }
13023
13024 /* Handler for thumb2 store single data item instructions. */
13025
13026 static int
13027 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13028 {
13029 struct regcache *reg_cache = thumb2_insn_r->regcache;
13030
13031 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13032 uint32_t address, offset_addr;
13033 uint32_t record_buf[8], record_buf_mem[8];
13034 uint32_t op1, op2;
13035
13036 ULONGEST u_regval[2];
13037
13038 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13039 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13040 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13041 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13042
13043 if (bit (thumb2_insn_r->arm_insn, 23))
13044 {
13045 /* T2 encoding. */
13046 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13047 offset_addr = u_regval[0] + offset_imm;
13048 address = offset_addr;
13049 }
13050 else
13051 {
13052 /* T3 encoding. */
13053 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13054 {
13055 /* Handle STRB/STRH/STR (register). */
13056 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13057 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13058 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13059 offset_addr = u_regval[1] << shift_imm;
13060 address = u_regval[0] + offset_addr;
13061 }
13062 else
13063 {
13064 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13065 if (bit (thumb2_insn_r->arm_insn, 10))
13066 {
13067 if (bit (thumb2_insn_r->arm_insn, 9))
13068 offset_addr = u_regval[0] + offset_imm;
13069 else
13070 offset_addr = u_regval[0] - offset_imm;
13071
13072 address = offset_addr;
13073 }
13074 else
13075 address = u_regval[0];
13076 }
13077 }
13078
13079 switch (op1)
13080 {
13081 /* Store byte instructions. */
13082 case 4:
13083 case 0:
13084 record_buf_mem[0] = 1;
13085 break;
13086 /* Store half word instructions. */
13087 case 1:
13088 case 5:
13089 record_buf_mem[0] = 2;
13090 break;
13091 /* Store word instructions. */
13092 case 2:
13093 case 6:
13094 record_buf_mem[0] = 4;
13095 break;
13096
13097 default:
13098 gdb_assert_not_reached ("no decoding pattern found");
13099 break;
13100 }
13101
13102 record_buf_mem[1] = address;
13103 thumb2_insn_r->mem_rec_count = 1;
13104 record_buf[0] = reg_rn;
13105 thumb2_insn_r->reg_rec_count = 1;
13106
13107 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13108 record_buf);
13109 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13110 record_buf_mem);
13111 return ARM_RECORD_SUCCESS;
13112 }
13113
13114 /* Handler for thumb2 load memory hints instructions. */
13115
13116 static int
13117 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13118 {
13119 uint32_t record_buf[8];
13120 uint32_t reg_rt, reg_rn;
13121
13122 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13123 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13124
13125 if (ARM_PC_REGNUM != reg_rt)
13126 {
13127 record_buf[0] = reg_rt;
13128 record_buf[1] = reg_rn;
13129 record_buf[2] = ARM_PS_REGNUM;
13130 thumb2_insn_r->reg_rec_count = 3;
13131
13132 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13133 record_buf);
13134 return ARM_RECORD_SUCCESS;
13135 }
13136
13137 return ARM_RECORD_FAILURE;
13138 }
13139
13140 /* Handler for thumb2 load word instructions. */
13141
13142 static int
13143 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13144 {
13145 uint32_t record_buf[8];
13146
13147 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13148 record_buf[1] = ARM_PS_REGNUM;
13149 thumb2_insn_r->reg_rec_count = 2;
13150
13151 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13152 record_buf);
13153 return ARM_RECORD_SUCCESS;
13154 }
13155
13156 /* Handler for thumb2 long multiply, long multiply accumulate, and
13157 divide instructions. */
13158
13159 static int
13160 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13161 {
13162 uint32_t opcode1 = 0, opcode2 = 0;
13163 uint32_t record_buf[8];
13164
13165 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13166 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13167
13168 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13169 {
13170 /* Handle SMULL, UMULL, SMLAL, UMLAL and the other long
13171 multiply-accumulate variants. */
13172 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13173 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13174 record_buf[2] = ARM_PS_REGNUM;
13175 thumb2_insn_r->reg_rec_count = 3;
13176 }
13177 else if (1 == opcode1 || 3 == opcode1)
13178 {
13179 /* Handle SDIV and UDIV. */
13180 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13181 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13182 record_buf[2] = ARM_PS_REGNUM;
13183 thumb2_insn_r->reg_rec_count = 3;
13184 }
13185 else
13186 return ARM_RECORD_FAILURE;
13187
13188 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13189 record_buf);
13190 return ARM_RECORD_SUCCESS;
13191 }
13192
13193 /* Record handler for thumb32 coprocessor instructions. */
13194
13195 static int
13196 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13197 {
13198 if (bit (thumb2_insn_r->arm_insn, 25))
13199 return arm_record_coproc_data_proc (thumb2_insn_r);
13200 else
13201 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13202 }
13203
13204 /* Record handler for advanced SIMD structure load/store instructions. */
13205
13206 static int
13207 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13208 {
13209 struct regcache *reg_cache = thumb2_insn_r->regcache;
13210 uint32_t l_bit, a_bit, b_bits;
13211 uint32_t record_buf[128], record_buf_mem[128];
13212 uint32_t reg_rn, reg_vd, address, f_elem;
13213 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13214 uint8_t f_ebytes;
13215
13216 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13217 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13218 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13219 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13220 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13221 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13222 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13223 f_elem = 8 / f_ebytes;
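/* f_ebytes is the element size in bytes; f_elem is the number of such
   elements in one 64-bit D register. */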
13224
13225 if (!l_bit)
13226 {
13227 ULONGEST u_regval = 0;
13228 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13229 address = u_regval;
13230
13231 if (!a_bit)
13232 {
13233 /* Handle VST1. */
13234 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13235 {
13236 if (b_bits == 0x07)
13237 bf_regs = 1;
13238 else if (b_bits == 0x0a)
13239 bf_regs = 2;
13240 else if (b_bits == 0x06)
13241 bf_regs = 3;
13242 else if (b_bits == 0x02)
13243 bf_regs = 4;
13244 else
13245 bf_regs = 0;
13246
13247 for (index_r = 0; index_r < bf_regs; index_r++)
13248 {
13249 for (index_e = 0; index_e < f_elem; index_e++)
13250 {
13251 record_buf_mem[index_m++] = f_ebytes;
13252 record_buf_mem[index_m++] = address;
13253 address = address + f_ebytes;
13254 thumb2_insn_r->mem_rec_count += 1;
13255 }
13256 }
13257 }
13258 /* Handle VST2. */
13259 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13260 {
13261 if (b_bits == 0x09 || b_bits == 0x08)
13262 bf_regs = 1;
13263 else if (b_bits == 0x03)
13264 bf_regs = 2;
13265 else
13266 bf_regs = 0;
13267
13268 for (index_r = 0; index_r < bf_regs; index_r++)
13269 for (index_e = 0; index_e < f_elem; index_e++)
13270 {
13271 for (loop_t = 0; loop_t < 2; loop_t++)
13272 {
13273 record_buf_mem[index_m++] = f_ebytes;
13274 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13275 thumb2_insn_r->mem_rec_count += 1;
13276 }
13277 address = address + (2 * f_ebytes);
13278 }
13279 }
13280 /* Handle VST3. */
13281 else if ((b_bits & 0x0e) == 0x04)
13282 {
13283 for (index_e = 0; index_e < f_elem; index_e++)
13284 {
13285 for (loop_t = 0; loop_t < 3; loop_t++)
13286 {
13287 record_buf_mem[index_m++] = f_ebytes;
13288 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13289 thumb2_insn_r->mem_rec_count += 1;
13290 }
13291 address = address + (3 * f_ebytes);
13292 }
13293 }
13294 /* Handle VST4. */
13295 else if (!(b_bits & 0x0e))
13296 {
13297 for (index_e = 0; index_e < f_elem; index_e++)
13298 {
13299 for (loop_t = 0; loop_t < 4; loop_t++)
13300 {
13301 record_buf_mem[index_m++] = f_ebytes;
13302 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13303 thumb2_insn_r->mem_rec_count += 1;
13304 }
13305 address = address + (4 * f_ebytes);
13306 }
13307 }
13308 }
13309 else
13310 {
13311 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13312
13313 if (bft_size == 0x00)
13314 f_ebytes = 1;
13315 else if (bft_size == 0x01)
13316 f_ebytes = 2;
13317 else if (bft_size == 0x02)
13318 f_ebytes = 4;
13319 else
13320 f_ebytes = 0;
13321
13322 /* Handle VST1. */
13323 if (!(b_bits & 0x0b) || b_bits == 0x08)
13324 thumb2_insn_r->mem_rec_count = 1;
13325 /* Handle VST2. */
13326 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13327 thumb2_insn_r->mem_rec_count = 2;
13328 /* Handle VST3. */
13329 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13330 thumb2_insn_r->mem_rec_count = 3;
13331 /* Handle VST4. */
13332 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13333 thumb2_insn_r->mem_rec_count = 4;
13334
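/* Record one (length, address) pair per element written by the
   single-element store. */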
13335 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13336 {
13337 record_buf_mem[index_m * 2] = f_ebytes;
13338 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
13339 }
13340 }
13341 }
13342 else
13343 {
13344 if (!a_bit)
13345 {
13346 /* Handle VLD1. */
13347 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13348 thumb2_insn_r->reg_rec_count = 1;
13349 /* Handle VLD2. */
13350 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13351 thumb2_insn_r->reg_rec_count = 2;
13352 /* Handle VLD3. */
13353 else if ((b_bits & 0x0e) == 0x04)
13354 thumb2_insn_r->reg_rec_count = 3;
13355 /* Handle VLD4. */
13356 else if (!(b_bits & 0x0e))
13357 thumb2_insn_r->reg_rec_count = 4;
13358 }
13359 else
13360 {
13361 /* Handle VLD1. */
13362 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13363 thumb2_insn_r->reg_rec_count = 1;
13364 /* Handle VLD2. */
13365 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13366 thumb2_insn_r->reg_rec_count = 2;
13367 /* Handle VLD3. */
13368 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13369 thumb2_insn_r->reg_rec_count = 3;
13370 /* Handle VLD4. */
13371 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13372 thumb2_insn_r->reg_rec_count = 4;
13373
13374 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13375 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13376 }
13377 }
13378
13379 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13380 {
13381 record_buf[index_r] = reg_rn;
13382 thumb2_insn_r->reg_rec_count += 1;
13383 }
13384
13385 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13386 record_buf);
13387 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13388 record_buf_mem);
13389 return 0;
13390 }
13391
13392 /* Decodes thumb2 instruction type and invokes its record handler. */
13393
13394 static unsigned int
13395 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13396 {
13397 uint32_t op, op1, op2;
13398
13399 op = bit (thumb2_insn_r->arm_insn, 15);
13400 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13401 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
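/* op1 (bits 27-28) selects the major 32-bit Thumb groups dispatched
   below: 01 - load/store multiple, load/store dual/exclusive,
   data-processing (shifted register) and coprocessor; 10 -
   data-processing (immediate) and branches/miscellaneous control;
   11 - loads, stores, data-processing (register), multiplies and
   coprocessor. */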
13402
13403 if (op1 == 0x01)
13404 {
13405 if (!(op2 & 0x64))
13406 {
13407 /* Load/store multiple instruction. */
13408 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13409 }
13410 else if ((op2 & 0x64) == 0x4)
13411 {
13412 /* Load/store (dual/exclusive) and table branch instruction. */
13413 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13414 }
13415 else if ((op2 & 0x60) == 0x20)
13416 {
13417 /* Data-processing (shifted register). */
13418 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13419 }
13420 else if (op2 & 0x40)
13421 {
13422 /* Co-processor instructions. */
13423 return thumb2_record_coproc_insn (thumb2_insn_r);
13424 }
13425 }
13426 else if (op1 == 0x02)
13427 {
13428 if (op)
13429 {
13430 /* Branches and miscellaneous control instructions. */
13431 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13432 }
13433 else if (op2 & 0x20)
13434 {
13435 /* Data-processing (plain binary immediate) instruction. */
13436 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13437 }
13438 else
13439 {
13440 /* Data-processing (modified immediate). */
13441 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13442 }
13443 }
13444 else if (op1 == 0x03)
13445 {
13446 if (!(op2 & 0x71))
13447 {
13448 /* Store single data item. */
13449 return thumb2_record_str_single_data (thumb2_insn_r);
13450 }
13451 else if (!((op2 & 0x71) ^ 0x10))
13452 {
13453 /* Advanced SIMD or structure load/store instructions. */
13454 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13455 }
13456 else if (!((op2 & 0x67) ^ 0x01))
13457 {
13458 /* Load byte, memory hints instruction. */
13459 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13460 }
13461 else if (!((op2 & 0x67) ^ 0x03))
13462 {
13463 /* Load halfword, memory hints instruction. */
13464 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13465 }
13466 else if (!((op2 & 0x67) ^ 0x05))
13467 {
13468 /* Load word instruction. */
13469 return thumb2_record_ld_word (thumb2_insn_r);
13470 }
13471 else if (!((op2 & 0x70) ^ 0x20))
13472 {
13473 /* Data-processing (register) instruction. */
13474 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13475 }
13476 else if (!((op2 & 0x78) ^ 0x30))
13477 {
13478 /* Multiply, multiply accumulate, abs diff instruction. */
13479 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13480 }
13481 else if (!((op2 & 0x78) ^ 0x38))
13482 {
13483 /* Long multiply, long multiply accumulate, and divide. */
13484 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13485 }
13486 else if (op2 & 0x40)
13487 {
13488 /* Co-processor instructions. */
13489 return thumb2_record_coproc_insn (thumb2_insn_r);
13490 }
13491 }
13492
13493 return -1;
13494 }
13495
13496 namespace {
13497 /* Abstract memory reader. */
13498
13499 class abstract_memory_reader
13500 {
13501 public:
13502 /* Read LEN bytes of target memory at address MEMADDR, placing the
13503 results in GDB's memory at BUF. Return true on success. */
13504
13505 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13506 };
13507
13508 /* Instruction reader from real target. */
13509
13510 class instruction_reader : public abstract_memory_reader
13511 {
13512 public:
13513 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13514 {
13515 if (target_read_memory (memaddr, buf, len))
13516 return false;
13517 else
13518 return true;
13519 }
13520 };
13521
13522 } // namespace
13523
13524 /* Extract an arm/thumb/thumb2 insn of INSN_SIZE bytes. Return 0 on
13525 success and a positive value on failure. */
13526
13527 static int
13528 extract_arm_insn (abstract_memory_reader& reader,
13529 insn_decode_record *insn_record, uint32_t insn_size)
13530 {
13531 gdb_byte buf[insn_size];
13532
13533 memset (&buf[0], 0, insn_size);
13534
13535 if (!reader.read (insn_record->this_addr, buf, insn_size))
13536 return 1;
13537 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13538 insn_size,
13539 gdbarch_byte_order_for_code (insn_record->gdbarch));
13540 return 0;
13541 }
13542
13543 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13544
13545 /* Decode an arm/thumb insn depending on its condition codes and opcodes,
13546 and dispatch it to the matching record handler. */
13547
13548 static int
13549 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13550 record_type_t record_type, uint32_t insn_size)
13551 {
13552
13553 /* (Starting from numerical 0), bits 25, 26, 27 decode the type of arm
13554 instruction. */
13555 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13556 {
13557 arm_record_data_proc_misc_ld_str, /* 000. */
13558 arm_record_data_proc_imm, /* 001. */
13559 arm_record_ld_st_imm_offset, /* 010. */
13560 arm_record_ld_st_reg_offset, /* 011. */
13561 arm_record_ld_st_multiple, /* 100. */
13562 arm_record_b_bl, /* 101. */
13563 arm_record_asimd_vfp_coproc, /* 110. */
13564 arm_record_coproc_data_proc /* 111. */
13565 };
13566
13567 /* (Starting from numerical 0), bits 13, 14, 15 decode the type of thumb
13568 instruction. */
13569 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13570 {
13571 thumb_record_shift_add_sub, /* 000. */
13572 thumb_record_add_sub_cmp_mov, /* 001. */
13573 thumb_record_ld_st_reg_offset, /* 010. */
13574 thumb_record_ld_st_imm_offset, /* 011. */
13575 thumb_record_ld_st_stack, /* 100. */
13576 thumb_record_misc, /* 101. */
13577 thumb_record_ldm_stm_swi, /* 110. */
13578 thumb_record_branch /* 111. */
13579 };
13580
13581 uint32_t ret = 0; /* Return value: negative: failure; 0: success. */
13582 uint32_t insn_id = 0;
13583
13584 if (extract_arm_insn (reader, arm_record, insn_size))
13585 {
13586 if (record_debug)
13587 {
13588 gdb_printf (gdb_stdlog,
13589 _("Process record: error reading memory at "
13590 "addr %s len = %d.\n"),
13591 paddress (arm_record->gdbarch,
13592 arm_record->this_addr), insn_size);
13593 }
13594 return -1;
13595 }
13596 else if (ARM_RECORD == record_type)
13597 {
13598 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13599 insn_id = bits (arm_record->arm_insn, 25, 27);
13600
13601 if (arm_record->cond == 0xf)
13602 ret = arm_record_extension_space (arm_record);
13603 else
13604 {
13605 /* The insn is not in the unconditional (extension) space, so decode
13606 it with the normal opcode table. */
13607 ret = arm_handle_insn[insn_id] (arm_record);
13608 }
13609 if (ret != ARM_RECORD_SUCCESS)
13610 {
13611 arm_record_unsupported_insn (arm_record);
13612 ret = -1;
13613 }
13614 }
13615 else if (THUMB_RECORD == record_type)
13616 {
13617 /* Thumb does not have condition codes, so set this to a negative value. */
13618 arm_record->cond = -1;
13619 insn_id = bits (arm_record->arm_insn, 13, 15);
13620 ret = thumb_handle_insn[insn_id] (arm_record);
13621 if (ret != ARM_RECORD_SUCCESS)
13622 {
13623 arm_record_unsupported_insn (arm_record);
13624 ret = -1;
13625 }
13626 }
13627 else if (THUMB2_RECORD == record_type)
13628 {
13629 /* Thumb does not have condition codes, so set this to a negative value. */
13630 arm_record->cond = -1;
13631
13632 /* Swap the first half of the 32-bit thumb instruction with the second half. */
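/* extract_arm_insn read the two halfwords in memory order; the Thumb-2
   decoders expect the first halfword in the upper 16 bits, as laid out
   in the ARM architecture manual. */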
13633 arm_record->arm_insn
13634 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13635
13636 ret = thumb2_record_decode_insn_handler (arm_record);
13637
13638 if (ret != ARM_RECORD_SUCCESS)
13639 {
13640 arm_record_unsupported_insn (arm_record);
13641 ret = -1;
13642 }
13643 }
13644 else
13645 {
13646 /* Unknown record type; this should never happen. */
13647 gdb_assert_not_reached ("not a valid instruction, could not decode");
13648 }
13649
13650 return ret;
13651 }
13652
13653 #if GDB_SELF_TEST
13654 namespace selftests {
13655
13656 /* Provide both 16-bit and 32-bit thumb instructions. */
13657
13658 class instruction_reader_thumb : public abstract_memory_reader
13659 {
13660 public:
13661 template<size_t SIZE>
13662 instruction_reader_thumb (enum bfd_endian endian,
13663 const uint16_t (&insns)[SIZE])
13664 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13665 {}
13666
13667 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13668 {
13669 SELF_CHECK (len == 4 || len == 2);
13670 SELF_CHECK (memaddr % 2 == 0);
13671 SELF_CHECK ((memaddr / 2) < m_insns_size);
13672
13673 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13674 if (len == 4)
13675 {
13676 store_unsigned_integer (&buf[2], 2, m_endian,
13677 m_insns[memaddr / 2 + 1]);
13678 }
13679 return true;
13680 }
13681
13682 private:
13683 enum bfd_endian m_endian;
13684 const uint16_t *m_insns;
13685 size_t m_insns_size;
13686 };
13687
13688 static void
13689 arm_record_test (void)
13690 {
13691 struct gdbarch_info info;
13692 info.bfd_arch_info = bfd_scan_arch ("arm");
13693
13694 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13695
13696 SELF_CHECK (gdbarch != NULL);
13697
13698 /* 16-bit Thumb instructions. */
13699 {
13700 insn_decode_record arm_record;
13701
13702 memset (&arm_record, 0, sizeof (insn_decode_record));
13703 arm_record.gdbarch = gdbarch;
13704
13705 static const uint16_t insns[] = {
13706 /* db b2 uxtb r3, r3 */
13707 0xb2db,
13708 /* cd 58 ldr r5, [r1, r3] */
13709 0x58cd,
13710 };
13711
13712 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13713 instruction_reader_thumb reader (endian, insns);
13714 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13715 THUMB_INSN_SIZE_BYTES);
13716
13717 SELF_CHECK (ret == 0);
13718 SELF_CHECK (arm_record.mem_rec_count == 0);
13719 SELF_CHECK (arm_record.reg_rec_count == 1);
13720 SELF_CHECK (arm_record.arm_regs[0] == 3);
13721
13722 arm_record.this_addr += 2;
13723 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13724 THUMB_INSN_SIZE_BYTES);
13725
13726 SELF_CHECK (ret == 0);
13727 SELF_CHECK (arm_record.mem_rec_count == 0);
13728 SELF_CHECK (arm_record.reg_rec_count == 1);
13729 SELF_CHECK (arm_record.arm_regs[0] == 5);
13730 }
13731
13732 /* 32-bit Thumb-2 instructions. */
13733 {
13734 insn_decode_record arm_record;
13735
13736 memset (&arm_record, 0, sizeof (insn_decode_record));
13737 arm_record.gdbarch = gdbarch;
13738
13739 static const uint16_t insns[] = {
13740 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13741 0xee1d, 0x7f70,
13742 };
13743
13744 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13745 instruction_reader_thumb reader (endian, insns);
13746 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13747 THUMB2_INSN_SIZE_BYTES);
13748
13749 SELF_CHECK (ret == 0);
13750 SELF_CHECK (arm_record.mem_rec_count == 0);
13751 SELF_CHECK (arm_record.reg_rec_count == 1);
13752 SELF_CHECK (arm_record.arm_regs[0] == 7);
13753 }
13754 }
13755
13756 /* Instruction reader from manually cooked instruction sequences. */
13757
13758 class test_arm_instruction_reader : public arm_instruction_reader
13759 {
13760 public:
13761 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
13762 : m_insns (insns)
13763 {}
13764
13765 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
13766 {
13767 SELF_CHECK (memaddr % 4 == 0);
13768 SELF_CHECK (memaddr / 4 < m_insns.size ());
13769
13770 return m_insns[memaddr / 4];
13771 }
13772
13773 private:
13774 const gdb::array_view<const uint32_t> m_insns;
13775 };
13776
13777 static void
13778 arm_analyze_prologue_test ()
13779 {
13780 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
13781 {
13782 struct gdbarch_info info;
13783 info.byte_order = endianness;
13784 info.byte_order_for_code = endianness;
13785 info.bfd_arch_info = bfd_scan_arch ("arm");
13786
13787 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13788
13789 SELF_CHECK (gdbarch != NULL);
13790
13791 /* The "sub" instruction contains an immediate value rotate count of 0,
13792 which resulted in a 32-bit shift of a 32-bit value, caught by
13793 UBSan. */
13794 const uint32_t insns[] = {
13795 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13796 0xe1a05000, /* mov r5, r0 */
13797 0xe5903020, /* ldr r3, [r0, #32] */
13798 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13799 };
13800
13801 test_arm_instruction_reader mem_reader (insns);
13802 arm_prologue_cache cache;
13803 cache.saved_regs = trad_frame_alloc_saved_regs (gdbarch);
13804
13805 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
13806 }
13807 }
13808
13809 } // namespace selftests
13810 #endif /* GDB_SELF_TEST */
13811
13812 /* Cleans up local record registers and memory allocations. */
13813
13814 static void
13815 deallocate_reg_mem (insn_decode_record *record)
13816 {
13817 xfree (record->arm_regs);
13818 xfree (record->arm_mems);
13819 }
13820
13821
13822 /* Parse the current instruction, and record the values of the registers
13823 and memory that will be changed by it to "record_arch_list".
13824 Return -1 if something goes wrong. */
13825
13826 int
13827 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13828 CORE_ADDR insn_addr)
13829 {
13830
13831 uint32_t no_of_rec = 0;
13832 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13833 ULONGEST t_bit = 0, insn_id = 0;
13834
13835 ULONGEST u_regval = 0;
13836
13837 insn_decode_record arm_record;
13838
13839 memset (&arm_record, 0, sizeof (insn_decode_record));
13840 arm_record.regcache = regcache;
13841 arm_record.this_addr = insn_addr;
13842 arm_record.gdbarch = gdbarch;
13843
13844
13845 if (record_debug > 1)
13846 {
13847 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
13848 "addr = %s\n",
13849 paddress (gdbarch, arm_record.this_addr));
13850 }
13851
13852 instruction_reader reader;
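/* Fetch just the first halfword for now; decode_insn re-reads the
   instruction with the correct size once we know whether it is an ARM,
   Thumb or Thumb-2 one. */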
13853 if (extract_arm_insn (reader, &arm_record, 2))
13854 {
13855 if (record_debug)
13856 {
13857 gdb_printf (gdb_stdlog,
13858 _("Process record: error reading memory at "
13859 "addr %s len = %d.\n"),
13860 paddress (arm_record.gdbarch,
13861 arm_record.this_addr), 2);
13862 }
13863 return -1;
13864 }
13865
13866 /* Check whether the insn is a thumb or an arm one. */
13867
13868 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13869 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13870
13871
13872 if (!(u_regval & t_bit))
13873 {
13874 /* We are decoding arm insn. */
13875 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13876 }
13877 else
13878 {
13879 insn_id = bits (arm_record.arm_insn, 11, 15);
13880 /* Is it a thumb2 insn? */
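/* The first halfword of a 32-bit Thumb-2 instruction has its top five
   bits equal to 0b11101, 0b11110 or 0b11111 (0x1D, 0x1E or 0x1F). */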
13881 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13882 {
13883 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13884 THUMB2_INSN_SIZE_BYTES);
13885 }
13886 else
13887 {
13888 /* We are decoding thumb insn. */
13889 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13890 THUMB_INSN_SIZE_BYTES);
13891 }
13892 }
13893
13894 if (0 == ret)
13895 {
13896 /* Record registers. */
13897 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13898 if (arm_record.arm_regs)
13899 {
13900 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13901 {
13902 if (record_full_arch_list_add_reg
13903 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13904 ret = -1;
13905 }
13906 }
13907 /* Record memories. */
13908 if (arm_record.arm_mems)
13909 {
13910 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13911 {
13912 if (record_full_arch_list_add_mem
13913 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13914 arm_record.arm_mems[no_of_rec].len))
13915 ret = -1;
13916 }
13917 }
13918
13919 if (record_full_arch_list_add_end ())
13920 ret = -1;
13921 }
13922
13923
13924 deallocate_reg_mem (&arm_record);
13925
13926 return ret;
13927 }
13928
13929 /* See arm-tdep.h. */
13930
13931 const target_desc *
13932 arm_read_description (arm_fp_type fp_type)
13933 {
13934 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13935
13936 if (tdesc == nullptr)
13937 {
13938 tdesc = arm_create_target_description (fp_type);
13939 tdesc_arm_list[fp_type] = tdesc;
13940 }
13941
13942 return tdesc;
13943 }
13944
13945 /* See arm-tdep.h. */
13946
13947 const target_desc *
13948 arm_read_mprofile_description (arm_m_profile_type m_type)
13949 {
13950 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13951
13952 if (tdesc == nullptr)
13953 {
13954 tdesc = arm_create_mprofile_target_description (m_type);
13955 tdesc_arm_mprofile_list[m_type] = tdesc;
13956 }
13957
13958 return tdesc;
13959 }