gdb: fix gdbarch_tdep ODR violation
[binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2021 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "producer.h"
64
65 #if GDB_SELF_TEST
66 #include "gdbsupport/selftest.h"
67 #endif
68
69 static bool arm_debug;
70
71 /* Print an "arm" debug statement. */
72
73 #define arm_debug_printf(fmt, ...) \
74 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
75
76 /* Macros for setting and testing a bit in a minimal symbol that marks
77 it as a Thumb function. The MSB of the minimal symbol's "info" field
78 is used for this purpose.
79
80 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
81 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
82
83 #define MSYMBOL_SET_SPECIAL(msym) \
84 MSYMBOL_TARGET_FLAG_1 (msym) = 1
85
86 #define MSYMBOL_IS_SPECIAL(msym) \
87 MSYMBOL_TARGET_FLAG_1 (msym)
88
89 struct arm_mapping_symbol
90 {
91 CORE_ADDR value;
92 char type;
93
94 bool operator< (const arm_mapping_symbol &other) const
95 { return this->value < other.value; }
96 };
97
98 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
99
100 struct arm_per_bfd
101 {
102 explicit arm_per_bfd (size_t num_sections)
103 : section_maps (new arm_mapping_symbol_vec[num_sections]),
104 section_maps_sorted (new bool[num_sections] ())
105 {}
106
107 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
108
109 /* Information about mapping symbols ($a, $d, $t) in the objfile.
110
111 The format is an array of vectors of arm_mapping_symbols: there is one
112 vector for each section of the objfile (the array is indexed by BFD
113 section index).
114
115 For each section, the vector of arm_mapping_symbol is sorted by
116 symbol value (address). */
117 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
118
119 /* For each corresponding element of section_maps above, whether that
120 vector has been sorted. */
121 std::unique_ptr<bool[]> section_maps_sorted;
122 };
123
124 /* Per-bfd data used for mapping symbols. */
125 static bfd_key<arm_per_bfd> arm_bfd_data_key;
126
127 /* The list of available "set arm ..." and "show arm ..." commands. */
128 static struct cmd_list_element *setarmcmdlist = NULL;
129 static struct cmd_list_element *showarmcmdlist = NULL;
130
131 /* The type of floating-point to use. Keep this in sync with enum
132 arm_float_model, and the help string in _initialize_arm_tdep. */
133 static const char *const fp_model_strings[] =
134 {
135 "auto",
136 "softfpa",
137 "fpa",
138 "softvfp",
139 "vfp",
140 NULL
141 };
142
143 /* A variable that can be configured by the user. */
144 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
145 static const char *current_fp_model = "auto";
146
147 /* The ABI to use. Keep this in sync with arm_abi_kind. */
148 static const char *const arm_abi_strings[] =
149 {
150 "auto",
151 "APCS",
152 "AAPCS",
153 NULL
154 };
155
156 /* A variable that can be configured by the user. */
157 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
158 static const char *arm_abi_string = "auto";
159
160 /* The execution mode to assume. */
161 static const char *const arm_mode_strings[] =
162 {
163 "auto",
164 "arm",
165 "thumb",
166 NULL
167 };
168
169 static const char *arm_fallback_mode_string = "auto";
170 static const char *arm_force_mode_string = "auto";
171
172 /* The standard register names, and all the valid aliases for them. Note
173 that `fp', `sp' and `pc' are not added in this alias list, because they
174 have been added as builtin user registers in
175 std-regs.c:_initialize_frame_reg. */
176 static const struct
177 {
178 const char *name;
179 int regnum;
180 } arm_register_aliases[] = {
181 /* Basic register numbers. */
182 { "r0", 0 },
183 { "r1", 1 },
184 { "r2", 2 },
185 { "r3", 3 },
186 { "r4", 4 },
187 { "r5", 5 },
188 { "r6", 6 },
189 { "r7", 7 },
190 { "r8", 8 },
191 { "r9", 9 },
192 { "r10", 10 },
193 { "r11", 11 },
194 { "r12", 12 },
195 { "r13", 13 },
196 { "r14", 14 },
197 { "r15", 15 },
198 /* Synonyms (argument and variable registers). */
199 { "a1", 0 },
200 { "a2", 1 },
201 { "a3", 2 },
202 { "a4", 3 },
203 { "v1", 4 },
204 { "v2", 5 },
205 { "v3", 6 },
206 { "v4", 7 },
207 { "v5", 8 },
208 { "v6", 9 },
209 { "v7", 10 },
210 { "v8", 11 },
211 /* Other platform-specific names for r9. */
212 { "sb", 9 },
213 { "tr", 9 },
214 /* Special names. */
215 { "ip", 12 },
216 { "lr", 14 },
217 /* Names used by GCC (not listed in the ARM EABI). */
218 { "sl", 10 },
219 /* A special name from the older ATPCS. */
220 { "wr", 7 },
221 };
222
223 static const char *const arm_register_names[] =
224 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
225 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
226 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
227 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
228 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
229 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
230 "fps", "cpsr" }; /* 24 25 */
231
232 /* Holds the current set of options to be passed to the disassembler. */
233 static char *arm_disassembler_options;
234
235 /* Valid register name styles. */
236 static const char **valid_disassembly_styles;
237
238 /* Disassembly style to use. Default to "std" register names. */
239 static const char *disassembly_style;
240
241 /* All possible arm target descriptors. */
242 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
243 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
244
245 /* This is used to keep the bfd arch_info in sync with the disassembly
246 style. */
247 static void set_disassembly_style_sfunc (const char *, int,
248 struct cmd_list_element *);
249 static void show_disassembly_style_sfunc (struct ui_file *, int,
250 struct cmd_list_element *,
251 const char *);
252
253 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
254 readable_regcache *regcache,
255 int regnum, gdb_byte *buf);
256 static void arm_neon_quad_write (struct gdbarch *gdbarch,
257 struct regcache *regcache,
258 int regnum, const gdb_byte *buf);
259
260 static CORE_ADDR
261 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
262
263
264 /* get_next_pcs operations. */
265 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
266 arm_get_next_pcs_read_memory_unsigned_integer,
267 arm_get_next_pcs_syscall_next_pc,
268 arm_get_next_pcs_addr_bits_remove,
269 arm_get_next_pcs_is_thumb,
270 NULL,
271 };
272
273 struct arm_prologue_cache
274 {
275 /* The stack pointer at the time this frame was created; i.e. the
276 caller's stack pointer when this function was called. It is used
277 to identify this frame. */
278 CORE_ADDR prev_sp;
279
280 /* The frame base for this frame is just prev_sp - frame size.
281 FRAMESIZE is the distance from the frame pointer to the
282 initial stack pointer. */
283
284 int framesize;
285
286 /* The register used to hold the frame pointer for this frame. */
287 int framereg;
288
289 /* Saved register offsets. */
290 trad_frame_saved_reg *saved_regs;
291 };
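/* For example, if FRAMEREG is THUMB_FP_REGNUM (r7), FRAMESIZE is 24 and the
   unwound r7 value is 0x7ffefff0, then prev_sp is 0x7ffefff0 + 24 =
   0x7fff0008, and the entries in saved_regs are offsets relative to that
   original stack pointer.  The numbers here are purely illustrative.  */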
292
293 namespace {
294
295 /* Abstract class to read ARM instructions from memory. */
296
297 class arm_instruction_reader
298 {
299 public:
300 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
301 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
302 };
303
304 /* Read instructions from target memory. */
305
306 class target_arm_instruction_reader : public arm_instruction_reader
307 {
308 public:
309 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
310 {
311 return read_code_unsigned_integer (memaddr, 4, byte_order);
312 }
313 };
314
315 } /* namespace */
316
317 static CORE_ADDR arm_analyze_prologue
318 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
319 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
320
321 /* Architecture version for displaced stepping. This affects the behaviour of
322 certain instructions, and really should not be hard-wired. */
323
324 #define DISPLACED_STEPPING_ARCH_VERSION 5
325
326 /* See arm-tdep.h. */
327
328 bool arm_apcs_32 = true;
329
330 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
331
332 int
333 arm_psr_thumb_bit (struct gdbarch *gdbarch)
334 {
335 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
336
337 if (tdep->is_m)
338 return XPSR_T;
339 else
340 return CPSR_T;
341 }
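/* For reference: in the A- and R-profile CPSR the T flag is bit 5 (mask
   0x20), while in the M-profile xPSR it is bit 24 (mask 0x01000000);
   CPSR_T and XPSR_T returned above encode those single-bit masks.  */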
342
343 /* Determine if the processor is currently executing in Thumb mode. */
344
345 int
346 arm_is_thumb (struct regcache *regcache)
347 {
348 ULONGEST cpsr;
349 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
350
351 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
352
353 return (cpsr & t_bit) != 0;
354 }
355
356 /* Determine if FRAME is executing in Thumb mode. */
357
358 int
359 arm_frame_is_thumb (struct frame_info *frame)
360 {
361 CORE_ADDR cpsr;
362 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
363
364 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
365 directly (from a signal frame or dummy frame) or by interpreting
366 the saved LR (from a prologue or DWARF frame). So consult it and
367 trust the unwinders. */
368 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
369
370 return (cpsr & t_bit) != 0;
371 }
372
373 /* Search for the mapping symbol covering MEMADDR. If one is found,
374 return its type. Otherwise, return 0. If START is non-NULL,
375 set *START to the location of the mapping symbol. */
376
377 static char
378 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
379 {
380 struct obj_section *sec;
381
382 /* If there are mapping symbols, consult them. */
383 sec = find_pc_section (memaddr);
384 if (sec != NULL)
385 {
386 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
387 if (data != NULL)
388 {
389 unsigned int section_idx = sec->the_bfd_section->index;
390 arm_mapping_symbol_vec &map
391 = data->section_maps[section_idx];
392
393 /* Sort the vector on first use. */
394 if (!data->section_maps_sorted[section_idx])
395 {
396 std::sort (map.begin (), map.end ());
397 data->section_maps_sorted[section_idx] = true;
398 }
399
400 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
401 arm_mapping_symbol_vec::const_iterator it
402 = std::lower_bound (map.begin (), map.end (), map_key);
403
404 /* std::lower_bound finds the earliest ordered insertion
405 point. If the symbol at this position starts at this exact
406 address, we use that; otherwise, the preceding
407 mapping symbol covers this address. */
408 if (it < map.end ())
409 {
410 if (it->value == map_key.value)
411 {
412 if (start)
413 *start = it->value + sec->addr ();
414 return it->type;
415 }
416 }
417
418 if (it > map.begin ())
419 {
420 arm_mapping_symbol_vec::const_iterator prev_it
421 = it - 1;
422
423 if (start)
424 *start = prev_it->value + sec->addr ();
425 return prev_it->type;
426 }
427 }
428 }
429
430 return 0;
431 }
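/* As an illustration, a section holding ARM code, then a literal pool, then
   Thumb code might carry the mapping symbols $a at offset 0x0, $d at offset
   0x40 and $t at offset 0x48 (offsets purely illustrative).  With the sorted
   vector above, a lookup for an address between offsets 0x40 and 0x48
   returns 'd', and a lookup at or beyond offset 0x48 returns 't'.  */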
432
433 /* Determine if the program counter specified in MEMADDR is in a Thumb
434 function. This function should be called for addresses unrelated to
435 any executing frame; otherwise, prefer arm_frame_is_thumb. */
436
437 int
438 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
439 {
440 struct bound_minimal_symbol sym;
441 char type;
442 arm_displaced_step_copy_insn_closure *dsc = nullptr;
443 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
444
445 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
446 dsc = ((arm_displaced_step_copy_insn_closure * )
447 gdbarch_displaced_step_copy_insn_closure_by_addr
448 (gdbarch, current_inferior (), memaddr));
449
450 /* When checking the mode of a displaced instruction in the copy area, the
451 mode should be determined by the instruction at the original address. */
452 if (dsc)
453 {
454 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
455 (unsigned long) dsc->insn_addr,
456 (unsigned long) memaddr);
457 memaddr = dsc->insn_addr;
458 }
459
460 /* If bit 0 of the address is set, assume this is a Thumb address. */
461 if (IS_THUMB_ADDR (memaddr))
462 return 1;
463
464 /* If the user wants to override the symbol table, let them. */
465 if (strcmp (arm_force_mode_string, "arm") == 0)
466 return 0;
467 if (strcmp (arm_force_mode_string, "thumb") == 0)
468 return 1;
469
470 /* ARM v6-M and v7-M are always in Thumb mode. */
471 if (tdep->is_m)
472 return 1;
473
474 /* If there are mapping symbols, consult them. */
475 type = arm_find_mapping_symbol (memaddr, NULL);
476 if (type)
477 return type == 't';
478
479 /* Thumb functions have a "special" bit set in minimal symbols. */
480 sym = lookup_minimal_symbol_by_pc (memaddr);
481 if (sym.minsym)
482 return (MSYMBOL_IS_SPECIAL (sym.minsym));
483
484 /* If the user wants to override the fallback mode, let them. */
485 if (strcmp (arm_fallback_mode_string, "arm") == 0)
486 return 0;
487 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
488 return 1;
489
490 /* If we couldn't find any symbol, but we're talking to a running
491 target, then trust the current value of $cpsr. This lets
492 "display/i $pc" always show the correct mode (though if there is
493 a symbol table we will not reach here, so it still may not be
494 displayed in the mode it will be executed). */
495 if (target_has_registers ())
496 return arm_frame_is_thumb (get_current_frame ());
497
498 /* Otherwise we're out of luck; we assume ARM. */
499 return 0;
500 }
501
502 /* Determine if the address specified equals any of these magic return
503 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
504 architectures.
505
506 From ARMv6-M Reference Manual B1.5.8
507 Table B1-5 Exception return behavior
508
509 EXC_RETURN Return To Return Stack
510 0xFFFFFFF1 Handler mode Main
511 0xFFFFFFF9 Thread mode Main
512 0xFFFFFFFD Thread mode Process
513
514 From ARMv7-M Reference Manual B1.5.8
515 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
516
517 EXC_RETURN Return To Return Stack
518 0xFFFFFFF1 Handler mode Main
519 0xFFFFFFF9 Thread mode Main
520 0xFFFFFFFD Thread mode Process
521
522 Table B1-9 EXC_RETURN definition of exception return behavior, with
523 FP
524
525 EXC_RETURN Return To Return Stack Frame Type
526 0xFFFFFFE1 Handler mode Main Extended
527 0xFFFFFFE9 Thread mode Main Extended
528 0xFFFFFFED Thread mode Process Extended
529 0xFFFFFFF1 Handler mode Main Basic
530 0xFFFFFFF9 Thread mode Main Basic
531 0xFFFFFFFD Thread mode Process Basic
532
533 For more details see "B1.5.8 Exception return behavior"
534 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
535
536 The ARMv8-M Architecture Technical Reference additionally defines,
537 for implementations without the Security Extension:
538
539 EXC_RETURN Condition
540 0xFFFFFFB0 Return to Handler mode.
541 0xFFFFFFB8 Return to Thread mode using the main stack.
542 0xFFFFFFBC Return to Thread mode using the process stack. */
543
544 static int
545 arm_m_addr_is_magic (CORE_ADDR addr)
546 {
547 switch (addr)
548 {
549 /* Values from ARMv8-M Architecture Technical Reference. */
550 case 0xffffffb0:
551 case 0xffffffb8:
552 case 0xffffffbc:
553 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
554 the exception return behavior. */
555 case 0xffffffe1:
556 case 0xffffffe9:
557 case 0xffffffed:
558 case 0xfffffff1:
559 case 0xfffffff9:
560 case 0xfffffffd:
561 /* Address is magic. */
562 return 1;
563
564 default:
565 /* Address is not magic. */
566 return 0;
567 }
568 }
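/* On M-profile cores the processor writes one of these EXC_RETURN values
   into LR on exception entry, so a "return" to such an address actually
   triggers an exception return.  This is why arm_addr_bits_remove below
   leaves these values untouched.  */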
569
570 /* Remove useless bits from addresses in a running program. */
571 static CORE_ADDR
572 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
573 {
574 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
575
576 /* On M-profile devices, do not strip the low bit from EXC_RETURN
577 (the magic exception return address). */
578 if (tdep->is_m && arm_m_addr_is_magic (val))
579 return val;
580
581 if (arm_apcs_32)
582 return UNMAKE_THUMB_ADDR (val);
583 else
584 return (val & 0x03fffffc);
585 }
586
587 /* Return 1 if PC is the start of a compiler helper function which
588 can be safely ignored during prologue skipping. IS_THUMB is true
589 if the function is known to be a Thumb function due to the way it
590 is being called. */
591 static int
592 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
593 {
594 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
595 struct bound_minimal_symbol msym;
596
597 msym = lookup_minimal_symbol_by_pc (pc);
598 if (msym.minsym != NULL
599 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
600 && msym.minsym->linkage_name () != NULL)
601 {
602 const char *name = msym.minsym->linkage_name ();
603
604 /* The GNU linker's Thumb call stub to foo is named
605 __foo_from_thumb. */
606 if (strstr (name, "_from_thumb") != NULL)
607 name += 2;
608
609 /* On soft-float targets, __truncdfsf2 is called to convert promoted
610 arguments to their argument types in non-prototyped
611 functions. */
612 if (startswith (name, "__truncdfsf2"))
613 return 1;
614 if (startswith (name, "__aeabi_d2f"))
615 return 1;
616
617 /* Internal functions related to thread-local storage. */
618 if (startswith (name, "__tls_get_addr"))
619 return 1;
620 if (startswith (name, "__aeabi_read_tp"))
621 return 1;
622 }
623 else
624 {
625 /* If we run against a stripped glibc, we may be unable to identify
626 special functions by name. Check for one important case,
627 __aeabi_read_tp, by comparing the *code* against the default
628 implementation (this is hand-written ARM assembler in glibc). */
629
630 if (!is_thumb
631 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
632 == 0xe3e00a0f /* mov r0, #0xffff0fff */
633 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
634 == 0xe240f01f) /* sub pc, r0, #31 */
635 return 1;
636 }
637
638 return 0;
639 }
640
641 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1
642 is the first 16 bits of the instruction, and INSN2 is the second
643 16 bits. */
644 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
645 ((bits ((insn1), 0, 3) << 12) \
646 | (bits ((insn1), 10, 10) << 11) \
647 | (bits ((insn2), 12, 14) << 8) \
648 | bits ((insn2), 0, 7))
649
650 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
651 the 32-bit instruction. */
652 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
653 ((bits ((insn), 16, 19) << 12) \
654 | bits ((insn), 0, 11))
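/* Both macros just reassemble the scattered imm16 field: for encoding T the
   immediate is imm4:i:imm3:imm8 with imm4 = INSN1[3:0], i = INSN1[10],
   imm3 = INSN2[14:12] and imm8 = INSN2[7:0]; for encoding A it is imm4:imm12
   with imm4 = INSN[19:16] and imm12 = INSN[11:0].  */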
655
656 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
657
658 static unsigned int
659 thumb_expand_immediate (unsigned int imm)
660 {
661 unsigned int count = imm >> 7;
662
663 if (count < 8)
664 switch (count / 2)
665 {
666 case 0:
667 return imm & 0xff;
668 case 1:
669 return (imm & 0xff) | ((imm & 0xff) << 16);
670 case 2:
671 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
672 case 3:
673 return (imm & 0xff) | ((imm & 0xff) << 8)
674 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
675 }
676
677 return (0x80 | (imm & 0x7f)) << (32 - count);
678 }
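/* A few worked examples of the expansion above:
     0x0ab -> 0x000000ab
     0x1ab -> 0x00ab00ab
     0x2ab -> 0xab00ab00
     0x3ab -> 0xabababab
     0x4ab -> 0x55800000  (1:imm[6:0] = 0xab rotated right by 9)
   matching the ThumbExpandImm tables in the Architecture Reference Manual.  */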
679
680 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
681 the epilogue, 0 otherwise. */
682
683 static int
684 thumb_instruction_restores_sp (unsigned short insn)
685 {
686 return (insn == 0x46bd /* mov sp, r7 */
687 || (insn & 0xff80) == 0xb000 /* add sp, imm */
688 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
689 }
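/* For example, 0xb008 (add sp, #32) matches the second pattern above and
   0xbd00 (pop {pc}) matches the third, so either one ends the prologue scan
   below.  */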
690
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint. */
696
697 static CORE_ADDR
698 thumb_analyze_prologue (struct gdbarch *gdbarch,
699 CORE_ADDR start, CORE_ADDR limit,
700 struct arm_prologue_cache *cache)
701 {
702 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
703 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
704 int i;
705 pv_t regs[16];
706 CORE_ADDR offset;
707 CORE_ADDR unrecognized_pc = 0;
708
709 for (i = 0; i < 16; i++)
710 regs[i] = pv_register (i, 0);
711 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
712
713 while (start < limit)
714 {
715 unsigned short insn;
716
717 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
718
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
720 {
721 int regno;
722 int mask;
723
724 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
725 break;
726
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
730
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
734 {
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
736 -4);
737 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
738 }
739 }
740 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
741 {
742 offset = (insn & 0x7f) << 2; /* get scaled offset */
743 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
744 -offset);
745 }
746 else if (thumb_instruction_restores_sp (insn))
747 {
748 /* Don't scan past the epilogue. */
749 break;
750 }
751 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
753 (insn & 0xff) << 2);
754 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
756 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
757 bits (insn, 6, 8));
758 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
760 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
761 bits (insn, 0, 7));
762 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
764 && pv_is_constant (regs[bits (insn, 3, 5)]))
765 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
766 regs[bits (insn, 6, 8)]);
767 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs[bits (insn, 3, 6)]))
769 {
770 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
771 int rm = bits (insn, 3, 6);
772 regs[rd] = pv_add (regs[rd], regs[rm]);
773 }
774 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
775 {
776 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
777 int src_reg = (insn & 0x78) >> 3;
778 regs[dst_reg] = regs[src_reg];
779 }
780 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
781 {
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno = (insn >> 8) & 0x7;
786 pv_t addr;
787
788 offset = (insn & 0xff) << 2;
789 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
790
791 if (stack.store_would_trash (addr))
792 break;
793
794 stack.store (addr, 4, regs[regno]);
795 }
796 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
797 {
798 int rd = bits (insn, 0, 2);
799 int rn = bits (insn, 3, 5);
800 pv_t addr;
801
802 offset = bits (insn, 6, 10) << 2;
803 addr = pv_add_constant (regs[rn], offset);
804
805 if (stack.store_would_trash (addr))
806 break;
807
808 stack.store (addr, 4, regs[rd]);
809 }
810 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
813 /* Ignore stores of argument registers to the stack. */
814 ;
815 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
819 ;
820 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
823 /* Similarly ignore single loads from the stack. */
824 ;
825 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
829 ;
830 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
832 on Thumb. */
833 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
834 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
835 {
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant;
838 CORE_ADDR loc;
839
840 loc = start + 4 + bits (insn, 0, 7) * 4;
841 constant = read_memory_unsigned_integer (loc, 4, byte_order);
842 regs[bits (insn, 8, 10)] = pv_constant (constant);
843 }
844 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
845 {
846 unsigned short inst2;
847
848 inst2 = read_code_unsigned_integer (start + 2, 2,
849 byte_order_for_code);
850
851 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
852 {
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
856 CORE_ADDR nextpc;
857 int j1, j2, imm1, imm2;
858
859 imm1 = sbits (insn, 0, 10);
860 imm2 = bits (inst2, 0, 10);
861 j1 = bit (inst2, 13);
862 j2 = bit (inst2, 11);
863
864 offset = ((imm1 << 12) + (imm2 << 1));
865 offset ^= ((!j2) << 22) | ((!j1) << 23);
866
867 nextpc = start + 4 + offset;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2, 12) == 0)
870 nextpc = nextpc & 0xfffffffc;
871
872 if (!skip_prologue_function (gdbarch, nextpc,
873 bit (inst2, 12) != 0))
874 break;
875 }
876
877 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
878 { registers } */
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
880 {
881 pv_t addr = regs[bits (insn, 0, 3)];
882 int regno;
883
884 if (stack.store_would_trash (addr))
885 break;
886
887 /* Calculate offsets of saved registers. */
888 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
889 if (inst2 & (1 << regno))
890 {
891 addr = pv_add_constant (addr, -4);
892 stack.store (addr, 4, regs[regno]);
893 }
894
895 if (insn & 0x0020)
896 regs[bits (insn, 0, 3)] = addr;
897 }
898
899 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
900 [Rn, #+/-imm]{!} */
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
902 {
903 int regno1 = bits (inst2, 12, 15);
904 int regno2 = bits (inst2, 8, 11);
905 pv_t addr = regs[bits (insn, 0, 3)];
906
907 offset = inst2 & 0xff;
908 if (insn & 0x0080)
909 addr = pv_add_constant (addr, offset);
910 else
911 addr = pv_add_constant (addr, -offset);
912
913 if (stack.store_would_trash (addr))
914 break;
915
916 stack.store (addr, 4, regs[regno1]);
917 stack.store (pv_add_constant (addr, 4),
918 4, regs[regno2]);
919
920 if (insn & 0x0020)
921 regs[bits (insn, 0, 3)] = addr;
922 }
923
924 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2 & 0x0c00) == 0x0c00
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
927 {
928 int regno = bits (inst2, 12, 15);
929 pv_t addr = regs[bits (insn, 0, 3)];
930
931 offset = inst2 & 0xff;
932 if (inst2 & 0x0200)
933 addr = pv_add_constant (addr, offset);
934 else
935 addr = pv_add_constant (addr, -offset);
936
937 if (stack.store_would_trash (addr))
938 break;
939
940 stack.store (addr, 4, regs[regno]);
941
942 if (inst2 & 0x0100)
943 regs[bits (insn, 0, 3)] = addr;
944 }
945
946 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 {
949 int regno = bits (inst2, 12, 15);
950 pv_t addr;
951
952 offset = inst2 & 0xfff;
953 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
954
955 if (stack.store_would_trash (addr))
956 break;
957
958 stack.store (addr, 4, regs[regno]);
959 }
960
961 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
963 /* Ignore stores of argument registers to the stack. */
964 ;
965
966 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2 & 0x0d00) == 0x0c00
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
970 ;
971
972 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
973 { registers } */
974 && (inst2 & 0x8000) == 0x0000
975 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
978 ;
979
980 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
981 [Rn, #+/-imm] */
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore dual loads from the stack. */
984 ;
985
986 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2 & 0x0d00) == 0x0c00
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
990 ;
991
992 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore single loads from the stack. */
995 ;
996
997 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2 & 0x8000) == 0x0000)
999 {
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1003
1004 regs[bits (inst2, 8, 11)]
1005 = pv_add_constant (regs[bits (insn, 0, 3)],
1006 thumb_expand_immediate (imm));
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1011 {
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1015
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1018 }
1019
1020 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1022 {
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1026
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)],
1029 - (CORE_ADDR) thumb_expand_immediate (imm));
1030 }
1031
1032 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2 & 0x8000) == 0x0000)
1034 {
1035 unsigned int imm = ((bits (insn, 10, 10) << 11)
1036 | (bits (inst2, 12, 14) << 8)
1037 | bits (inst2, 0, 7));
1038
1039 regs[bits (inst2, 8, 11)]
1040 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1041 }
1042
1043 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1044 {
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1048
1049 regs[bits (inst2, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm));
1051 }
1052
1053 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1054 {
1055 unsigned int imm
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1057
1058 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1059 }
1060
1061 else if (insn == 0xea5f /* mov.w Rd,Rm */
1062 && (inst2 & 0xf0f0) == 0)
1063 {
1064 int dst_reg = (inst2 & 0x0f00) >> 8;
1065 int src_reg = inst2 & 0xf;
1066 regs[dst_reg] = regs[src_reg];
1067 }
1068
1069 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1070 {
1071 /* Constant pool loads. */
1072 unsigned int constant;
1073 CORE_ADDR loc;
1074
1075 offset = bits (inst2, 0, 11);
1076 if (insn & 0x0080)
1077 loc = start + 4 + offset;
1078 else
1079 loc = start + 4 - offset;
1080
1081 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1082 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1083 }
1084
1085 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1086 {
1087 /* Constant pool loads. */
1088 unsigned int constant;
1089 CORE_ADDR loc;
1090
1091 offset = bits (inst2, 0, 7) << 2;
1092 if (insn & 0x0080)
1093 loc = start + 4 + offset;
1094 else
1095 loc = start + 4 - offset;
1096
1097 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1098 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1099
1100 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1101 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1102 }
1103
1104 else if (thumb2_instruction_changes_pc (insn, inst2))
1105 {
1106 /* Don't scan past anything that might change control flow. */
1107 break;
1108 }
1109 else
1110 {
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc = start;
1114 }
1115
1116 start += 2;
1117 }
1118 else if (thumb_instruction_changes_pc (insn))
1119 {
1120 /* Don't scan past anything that might change control flow. */
1121 break;
1122 }
1123 else
1124 {
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc = start;
1128 }
1129
1130 start += 2;
1131 }
1132
1133 arm_debug_printf ("Prologue scan stopped at %s",
1134 paddress (gdbarch, start));
1135
1136 if (unrecognized_pc == 0)
1137 unrecognized_pc = start;
1138
1139 if (cache == NULL)
1140 return unrecognized_pc;
1141
1142 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1143 {
1144 /* Frame pointer is fp. Frame size is constant. */
1145 cache->framereg = ARM_FP_REGNUM;
1146 cache->framesize = -regs[ARM_FP_REGNUM].k;
1147 }
1148 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1149 {
1150 /* Frame pointer is r7. Frame size is constant. */
1151 cache->framereg = THUMB_FP_REGNUM;
1152 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1153 }
1154 else
1155 {
1156 /* Try the stack pointer... this is a bit desperate. */
1157 cache->framereg = ARM_SP_REGNUM;
1158 cache->framesize = -regs[ARM_SP_REGNUM].k;
1159 }
1160
1161 for (i = 0; i < 16; i++)
1162 if (stack.find_reg (gdbarch, i, &offset))
1163 cache->saved_regs[i].set_addr (offset);
1164
1165 return unrecognized_pc;
1166 }
1167
1168
1169 /* Try to analyze the instructions starting from PC, which load symbol
1170 __stack_chk_guard. Return the address of __stack_chk_guard loaded by
1171 these instructions, set the destination register number in *DESTREG, and
1172 set the size in bytes of the loading instructions in *OFFSET. Return 0
1173 if the instructions are not recognized. */
1174
1175 static CORE_ADDR
1176 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1177 unsigned int *destreg, int *offset)
1178 {
1179 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1180 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1181 unsigned int low, high, address;
1182
1183 address = 0;
1184 if (is_thumb)
1185 {
1186 unsigned short insn1
1187 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1188
1189 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1190 {
1191 *destreg = bits (insn1, 8, 10);
1192 *offset = 2;
1193 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1194 address = read_memory_unsigned_integer (address, 4,
1195 byte_order_for_code);
1196 }
1197 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1198 {
1199 unsigned short insn2
1200 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1201
1202 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1203
1204 insn1
1205 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1206 insn2
1207 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1208
1209 /* movt Rd, #const */
1210 if ((insn1 & 0xfbc0) == 0xf2c0)
1211 {
1212 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1213 *destreg = bits (insn2, 8, 11);
1214 *offset = 8;
1215 address = (high << 16 | low);
1216 }
1217 }
1218 }
1219 else
1220 {
1221 unsigned int insn
1222 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1223
1224 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1225 {
1226 address = bits (insn, 0, 11) + pc + 8;
1227 address = read_memory_unsigned_integer (address, 4,
1228 byte_order_for_code);
1229
1230 *destreg = bits (insn, 12, 15);
1231 *offset = 4;
1232 }
1233 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1234 {
1235 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1236
1237 insn
1238 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1239
1240 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1241 {
1242 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1243 *destreg = bits (insn, 12, 15);
1244 *offset = 8;
1245 address = (high << 16 | low);
1246 }
1247 }
1248 }
1249
1250 return address;
1251 }
1252
1253 /* Try to skip a sequence of instructions used for stack protector. If PC
1254 points to the first instruction of this sequence, return the address of
1255 first instruction after this sequence, otherwise, return original PC.
1256
1257 On ARM, this sequence of instructions is composed mainly of three steps:
1258 Step 1: load the symbol __stack_chk_guard,
1259 Step 2: load from the address of __stack_chk_guard,
1260 Step 3: store it somewhere else.
1261
1262 Usually, the instructions in steps 2 and 3 are the same across ARM
1263 architectures. In step 2 it is one instruction, 'ldr Rx, [Rn, #0]', and
1264 in step 3 it is also one instruction, 'str Rx, [r7, #immd]'. However,
1265 the instructions in step 1 vary across ARM architectures. On ARMv7,
1266 they are:
1267
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1270
1271 On ARMv5t, it is:
1272
1273 ldr Rn, .Label
1274 ....
1275 .Label:
1276 .word __stack_chk_guard
1277
1278 Since ldr/str are very common instructions, we can't use them as the
1279 'fingerprint' or 'signature' of the stack protector sequence. Here we choose
1280 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1281 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
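/* For instance, a typical ARMv7 sequence recognized here might look like
   (register choices purely illustrative):
       movw r3, #:lower16:__stack_chk_guard
       movt r3, #:upper16:__stack_chk_guard
       ldr  r3, [r3]         <- step 2
       str  r3, [r7, #12]    <- step 3
   in which case the function below returns the address just past the str.  */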
1282
1283 static CORE_ADDR
1284 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1285 {
1286 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1287 unsigned int basereg;
1288 struct bound_minimal_symbol stack_chk_guard;
1289 int offset;
1290 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1291 CORE_ADDR addr;
1292
1293 /* Try to parse the instructions in Step 1. */
1294 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1295 &basereg, &offset);
1296 if (!addr)
1297 return pc;
1298
1299 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1300 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1301 Otherwise, this sequence cannot be for the stack protector. */
1302 if (stack_chk_guard.minsym == NULL
1303 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1304 return pc;
1305
1306 if (is_thumb)
1307 {
1308 unsigned int destreg;
1309 unsigned short insn
1310 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1311
1312 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1313 if ((insn & 0xf800) != 0x6800)
1314 return pc;
1315 if (bits (insn, 3, 5) != basereg)
1316 return pc;
1317 destreg = bits (insn, 0, 2);
1318
1319 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1320 byte_order_for_code);
1321 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1322 if ((insn & 0xf800) != 0x6000)
1323 return pc;
1324 if (destreg != bits (insn, 0, 2))
1325 return pc;
1326 }
1327 else
1328 {
1329 unsigned int destreg;
1330 unsigned int insn
1331 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1332
1333 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1334 if ((insn & 0x0e500000) != 0x04100000)
1335 return pc;
1336 if (bits (insn, 16, 19) != basereg)
1337 return pc;
1338 destreg = bits (insn, 12, 15);
1339 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1340 insn = read_code_unsigned_integer (pc + offset + 4,
1341 4, byte_order_for_code);
1342 if ((insn & 0x0e500000) != 0x04000000)
1343 return pc;
1344 if (bits (insn, 12, 15) != destreg)
1345 return pc;
1346 }
1347 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1348 while it is 8 bytes on ARM. */
1349 if (is_thumb)
1350 return pc + offset + 4;
1351 else
1352 return pc + offset + 8;
1353 }
1354
1355 /* Advance the PC across any function entry prologue instructions to
1356 reach some "real" code.
1357
1358 The APCS (ARM Procedure Call Standard) defines the following
1359 prologue:
1360
1361 mov ip, sp
1362 [stmfd sp!, {a1,a2,a3,a4}]
1363 stmfd sp!, {...,fp,ip,lr,pc}
1364 [stfe f7, [sp, #-12]!]
1365 [stfe f6, [sp, #-12]!]
1366 [stfe f5, [sp, #-12]!]
1367 [stfe f4, [sp, #-12]!]
1368 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1369
1370 static CORE_ADDR
1371 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1372 {
1373 CORE_ADDR func_addr, limit_pc;
1374
1375 /* See if we can determine the end of the prologue via the symbol table.
1376 If so, then return either PC, or the PC after the prologue, whichever
1377 is greater. */
1378 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1379 {
1380 CORE_ADDR post_prologue_pc
1381 = skip_prologue_using_sal (gdbarch, func_addr);
1382 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1383
1384 if (post_prologue_pc)
1385 post_prologue_pc
1386 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1387
1388
1389 /* GCC always emits a line note before the prologue and another
1390 one after, even if the two are at the same address or on the
1391 same line. Take advantage of this so that we do not need to
1392 know every instruction that might appear in the prologue. We
1393 will have producer information for most binaries; if it is
1394 missing (e.g. for -gstabs), assume the GNU tools. */
1395 if (post_prologue_pc
1396 && (cust == NULL
1397 || COMPUNIT_PRODUCER (cust) == NULL
1398 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1399 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1400 return post_prologue_pc;
1401
1402 if (post_prologue_pc != 0)
1403 {
1404 CORE_ADDR analyzed_limit;
1405
1406 /* For non-GCC compilers, make sure the entire line is an
1407 acceptable prologue; GDB will round this function's
1408 return value up to the end of the following line so we
1409 can not skip just part of a line (and we do not want to).
1410
1411 RealView does not treat the prologue specially, but does
1412 associate prologue code with the opening brace; so this
1413 lets us skip the first line if we think it is the opening
1414 brace. */
1415 if (arm_pc_is_thumb (gdbarch, func_addr))
1416 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1417 post_prologue_pc, NULL);
1418 else
1419 analyzed_limit
1420 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1421 NULL, target_arm_instruction_reader ());
1422
1423 if (analyzed_limit != post_prologue_pc)
1424 return func_addr;
1425
1426 return post_prologue_pc;
1427 }
1428 }
1429
1430 /* Can't determine prologue from the symbol table, need to examine
1431 instructions. */
1432
1433 /* Find an upper limit on the function prologue using the debug
1434 information. If the debug information could not be used to provide
1435 that bound, then use an arbitrarily large number as the upper bound. */
1436 /* Like arm_scan_prologue, stop no later than pc + 64. */
1437 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1438 if (limit_pc == 0)
1439 limit_pc = pc + 64; /* Magic. */
1440
1441
1442 /* Check if this is Thumb code. */
1443 if (arm_pc_is_thumb (gdbarch, pc))
1444 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1445 else
1446 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1447 target_arm_instruction_reader ());
1448 }
1449
1450 /* *INDENT-OFF* */
1451 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1452 This function decodes a Thumb function prologue to determine:
1453 1) the size of the stack frame
1454 2) which registers are saved on it
1455 3) the offsets of saved regs
1456 4) the offset from the stack pointer to the frame pointer
1457
1458 A typical Thumb function prologue would create this stack frame
1459 (offsets relative to FP)
1460 old SP -> 24 stack parameters
1461 20 LR
1462 16 R7
1463 R7 -> 0 local variables (16 bytes)
1464 SP -> -12 additional stack space (12 bytes)
1465 The frame size would thus be 36 bytes, and the frame offset would be
1466 12 bytes. The frame register is R7.
1467
1468 The comments for thumb_analyze_prologue() describe the algorithm we use
1469 to detect the end of the prologue. */
1470 /* *INDENT-ON* */
1471
1472 static void
1473 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1474 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1475 {
1476 CORE_ADDR prologue_start;
1477 CORE_ADDR prologue_end;
1478
1479 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1480 &prologue_end))
1481 {
1482 /* See comment in arm_scan_prologue for an explanation of
1483 this heuristic. */
1484 if (prologue_end > prologue_start + 64)
1485 {
1486 prologue_end = prologue_start + 64;
1487 }
1488 }
1489 else
1490 /* We're in the boondocks: we have no idea where the start of the
1491 function is. */
1492 return;
1493
1494 prologue_end = std::min (prologue_end, prev_pc);
1495
1496 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1497 }
1498
1499 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1500 otherwise. */
1501
1502 static int
1503 arm_instruction_restores_sp (unsigned int insn)
1504 {
1505 if (bits (insn, 28, 31) != INST_NV)
1506 {
1507 if ((insn & 0x0df0f000) == 0x0080d000
1508 /* ADD SP (register or immediate). */
1509 || (insn & 0x0df0f000) == 0x0040d000
1510 /* SUB SP (register or immediate). */
1511 || (insn & 0x0ffffff0) == 0x01a0d000
1512 /* MOV SP. */
1513 || (insn & 0x0fff0000) == 0x08bd0000
1514 /* POP (LDMIA). */
1515 || (insn & 0x0fff0000) == 0x049d0000)
1516 /* POP of a single register. */
1517 return 1;
1518 }
1519
1520 return 0;
1521 }
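/* For example, 0xe28dd010 (add sp, sp, #16) matches the ADD SP pattern above
   and 0xe8bd8010 (ldmia sp!, {r4, pc}, i.e. pop {r4, pc}) matches the LDMIA
   pattern, so both are treated as epilogue instructions.  */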
1522
1523 /* Implement immediate value decoding, as described in section A5.2.4
1524 (Modified immediate constants in ARM instructions) of the ARM Architecture
1525 Reference Manual (ARMv7-A and ARMv7-R edition). */
1526
1527 static uint32_t
1528 arm_expand_immediate (uint32_t imm)
1529 {
1530 /* Immediate values are 12 bits long. */
1531 gdb_assert ((imm & 0xfffff000) == 0);
1532
1533 uint32_t unrotated_value = imm & 0xff;
1534 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1535
1536 if (rotate_amount == 0)
1537 return unrotated_value;
1538
1539 return ((unrotated_value >> rotate_amount)
1540 | (unrotated_value << (32 - rotate_amount)));
1541 }
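/* A few worked examples of the expansion above:
     0x0ff -> 0x000000ff  (no rotation)
     0x2ff -> 0xf000000f  (0xff rotated right by 4)
     0x4ff -> 0xff000000  (0xff rotated right by 8)
     0xfff -> 0x000003fc  (0xff rotated right by 30)  */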
1542
1543 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1544 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1545 fill it in. Return the first address not recognized as a prologue
1546 instruction.
1547
1548 We recognize all the instructions typically found in ARM prologues,
1549 plus harmless instructions which can be skipped (either for analysis
1550 purposes, or a more restrictive set that can be skipped when finding
1551 the end of the prologue). */
1552
1553 static CORE_ADDR
1554 arm_analyze_prologue (struct gdbarch *gdbarch,
1555 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1556 struct arm_prologue_cache *cache,
1557 const arm_instruction_reader &insn_reader)
1558 {
1559 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1560 int regno;
1561 CORE_ADDR offset, current_pc;
1562 pv_t regs[ARM_FPS_REGNUM];
1563 CORE_ADDR unrecognized_pc = 0;
1564 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1565
1566 /* Search the prologue looking for instructions that set up the
1567 frame pointer, adjust the stack pointer, and save registers.
1568
1569 Be careful, however, and if it doesn't look like a prologue,
1570 don't try to scan it. If, for instance, a frameless function
1571 begins with stmfd sp!, then we will tell ourselves there is
1572 a frame, which will confuse stack traceback, as well as "finish"
1573 and other operations that rely on a knowledge of the stack
1574 traceback. */
1575
1576 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1577 regs[regno] = pv_register (regno, 0);
1578 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1579
1580 for (current_pc = prologue_start;
1581 current_pc < prologue_end;
1582 current_pc += 4)
1583 {
1584 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1585
1586 if (insn == 0xe1a0c00d) /* mov ip, sp */
1587 {
1588 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1589 continue;
1590 }
1591 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1592 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1593 {
1594 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1595 int rd = bits (insn, 12, 15);
1596 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1597 continue;
1598 }
1599 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1600 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1601 {
1602 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1603 int rd = bits (insn, 12, 15);
1604 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1605 continue;
1606 }
1607 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1608 [sp, #-4]! */
1609 {
1610 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1611 break;
1612 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1613 stack.store (regs[ARM_SP_REGNUM], 4,
1614 regs[bits (insn, 12, 15)]);
1615 continue;
1616 }
1617 else if ((insn & 0xffff0000) == 0xe92d0000)
1618 /* stmfd sp!, {..., fp, ip, lr, pc}
1619 or
1620 stmfd sp!, {a1, a2, a3, a4} */
1621 {
1622 int mask = insn & 0xffff;
1623
1624 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1625 break;
1626
1627 /* Calculate offsets of saved registers. */
1628 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1629 if (mask & (1 << regno))
1630 {
1631 regs[ARM_SP_REGNUM]
1632 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1633 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1634 }
1635 }
1636 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1637 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1638 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1639 {
1640 /* No need to add this to saved_regs -- it's just an arg reg. */
1641 continue;
1642 }
1643 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1644 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1645 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1646 {
1647 /* No need to add this to saved_regs -- it's just an arg reg. */
1648 continue;
1649 }
1650 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1651 { registers } */
1652 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1653 {
1654 /* No need to add this to saved_regs -- it's just arg regs. */
1655 continue;
1656 }
1657 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1658 {
1659 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1660 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1661 }
1662 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1663 {
1664 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1665 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1666 }
1667 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1668 [sp, -#c]! */
1669 && tdep->have_fpa_registers)
1670 {
1671 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1672 break;
1673
1674 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1675 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1676 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1677 }
1678 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1679 [sp!] */
1680 && tdep->have_fpa_registers)
1681 {
1682 int n_saved_fp_regs;
1683 unsigned int fp_start_reg, fp_bound_reg;
1684
1685 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1686 break;
1687
1688 if ((insn & 0x800) == 0x800) /* N0 is set */
1689 {
1690 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1691 n_saved_fp_regs = 3;
1692 else
1693 n_saved_fp_regs = 1;
1694 }
1695 else
1696 {
1697 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1698 n_saved_fp_regs = 2;
1699 else
1700 n_saved_fp_regs = 4;
1701 }
1702
1703 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1704 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1705 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1706 {
1707 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1708 stack.store (regs[ARM_SP_REGNUM], 12,
1709 regs[fp_start_reg++]);
1710 }
1711 }
1712 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1713 {
1714 /* Allow some special function calls when skipping the
1715 prologue; GCC generates these before storing arguments to
1716 the stack. */
1717 CORE_ADDR dest = BranchDest (current_pc, insn);
1718
1719 if (skip_prologue_function (gdbarch, dest, 0))
1720 continue;
1721 else
1722 break;
1723 }
1724 else if ((insn & 0xf0000000) != 0xe0000000)
1725 break; /* Condition not true, exit early. */
1726 else if (arm_instruction_changes_pc (insn))
1727 /* Don't scan past anything that might change control flow. */
1728 break;
1729 else if (arm_instruction_restores_sp (insn))
1730 {
1731 /* Don't scan past the epilogue. */
1732 break;
1733 }
1734 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1735 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1736 /* Ignore block loads from the stack, potentially copying
1737 parameters from memory. */
1738 continue;
1739 else if ((insn & 0xfc500000) == 0xe4100000
1740 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1741 /* Similarly ignore single loads from the stack. */
1742 continue;
1743 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1744 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1745 register instead of the stack. */
1746 continue;
1747 else
1748 {
1749 /* The optimizer might shove anything into the prologue. If we
1750 are building up the cache (cache != NULL) from scanning the
1751 prologue, we just skip what we don't recognize and scan
1752 further to make the cache as complete as possible. However,
1753 if we are merely skipping the prologue, we stop immediately
1754 at the first unrecognized instruction. */
1755 unrecognized_pc = current_pc;
1756 if (cache != NULL)
1757 continue;
1758 else
1759 break;
1760 }
1761 }
1762
1763 if (unrecognized_pc == 0)
1764 unrecognized_pc = current_pc;
1765
1766 if (cache)
1767 {
1768 int framereg, framesize;
1769
1770 /* The frame size is just the distance from the frame register
1771 to the original stack pointer. */
1772 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1773 {
1774 /* Frame pointer is fp. */
1775 framereg = ARM_FP_REGNUM;
1776 framesize = -regs[ARM_FP_REGNUM].k;
1777 }
1778 else
1779 {
1780 /* Try the stack pointer... this is a bit desperate. */
1781 framereg = ARM_SP_REGNUM;
1782 framesize = -regs[ARM_SP_REGNUM].k;
1783 }
1784
1785 cache->framereg = framereg;
1786 cache->framesize = framesize;
1787
1788 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1789 if (stack.find_reg (gdbarch, regno, &offset))
1790 cache->saved_regs[regno].set_addr (offset);
1791 }
1792
1793 arm_debug_printf ("Prologue scan stopped at %s",
1794 paddress (gdbarch, unrecognized_pc));
1795
1796 return unrecognized_pc;
1797 }
1798
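/* Scan the prologue of the function containing the PC of THIS_FRAME and
   fill in CACHE with the frame register, frame size and saved register
   locations that are found.  Thumb frames are handled by
   thumb_scan_prologue; ARM frames by arm_analyze_prologue.  */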
1799 static void
1800 arm_scan_prologue (struct frame_info *this_frame,
1801 struct arm_prologue_cache *cache)
1802 {
1803 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1805 CORE_ADDR prologue_start, prologue_end;
1806 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1807 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1808 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1809
1810 /* Assume there is no frame until proven otherwise. */
1811 cache->framereg = ARM_SP_REGNUM;
1812 cache->framesize = 0;
1813
1814 /* Check for Thumb prologue. */
1815 if (arm_frame_is_thumb (this_frame))
1816 {
1817 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1818 return;
1819 }
1820
1821 /* Find the function prologue. If we can't find the function in
1822 the symbol table, peek in the stack frame to find the PC. */
1823 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1824 &prologue_end))
1825 {
1826 /* One way to find the end of the prologue (which works well
1827 for unoptimized code) is to do the following:
1828
1829 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1830
1831 if (sal.line == 0)
1832 prologue_end = prev_pc;
1833 else if (sal.end < prologue_end)
1834 prologue_end = sal.end;
1835
1836 This mechanism is very accurate so long as the optimizer
1837 doesn't move any instructions from the function body into the
1838 prologue. If this happens, sal.end will be the last
1839 instruction in the first hunk of prologue code just before
1840 the first instruction that the scheduler has moved from
1841 the body to the prologue.
1842
1843 In order to make sure that we scan all of the prologue
1844 instructions, we use a slightly less accurate mechanism which
1845 may scan more than necessary. To help compensate for this
1846 lack of accuracy, the prologue scanning loop below contains
1847 several clauses which'll cause the loop to terminate early if
1848 an implausible prologue instruction is encountered.
1849
1850 The expression
1851
1852 prologue_start + 64
1853
1854 is a suitable endpoint since it accounts for the largest
1855 possible prologue plus up to five instructions inserted by
1856 the scheduler. */
1857
1858 if (prologue_end > prologue_start + 64)
1859 {
1860 prologue_end = prologue_start + 64; /* See above. */
1861 }
1862 }
1863 else
1864 {
1865 /* We have no symbol information. Our only option is to assume this
1866 function has a standard stack frame and the normal frame register.
1867 Then, we can find the value of our frame pointer on entrance to
1868 the callee (or at the present moment if this is the innermost frame).
1869 The value stored there should be the address of the stmfd + 8. */
1870 CORE_ADDR frame_loc;
1871 ULONGEST return_value;
1872
1873 /* AAPCS does not use a frame register, so we can abort here. */
1874 if (tdep->arm_abi == ARM_ABI_AAPCS)
1875 return;
1876
1877 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1878 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1879 &return_value))
1880 return;
1881 else
1882 {
1883 prologue_start = gdbarch_addr_bits_remove
1884 (gdbarch, return_value) - 8;
1885 prologue_end = prologue_start + 64; /* See above. */
1886 }
1887 }
1888
1889 if (prev_pc < prologue_end)
1890 prologue_end = prev_pc;
1891
1892 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
1893 target_arm_instruction_reader ());
1894 }
1895
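/* Allocate and fill in a prologue cache for THIS_FRAME by scanning its
   function's prologue, then convert the register offsets recorded there
   into absolute addresses based on the unwound frame register.  */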
1896 static struct arm_prologue_cache *
1897 arm_make_prologue_cache (struct frame_info *this_frame)
1898 {
1899 int reg;
1900 struct arm_prologue_cache *cache;
1901 CORE_ADDR unwound_fp;
1902
1903 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1904 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1905
1906 arm_scan_prologue (this_frame, cache);
1907
1908 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1909 if (unwound_fp == 0)
1910 return cache;
1911
1912 cache->prev_sp = unwound_fp + cache->framesize;
1913
1914 /* Calculate actual addresses of saved registers using offsets
1915 determined by arm_scan_prologue. */
1916 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1917 if (cache->saved_regs[reg].is_addr ())
1918 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
1919 + cache->prev_sp);
1920
1921 return cache;
1922 }
1923
1924 /* Implementation of the stop_reason hook for arm_prologue frames. */
1925
1926 static enum unwind_stop_reason
1927 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1928 void **this_cache)
1929 {
1930 struct arm_prologue_cache *cache;
1931 CORE_ADDR pc;
1932
1933 if (*this_cache == NULL)
1934 *this_cache = arm_make_prologue_cache (this_frame);
1935 cache = (struct arm_prologue_cache *) *this_cache;
1936
1937 /* This is meant to halt the backtrace at "_start". */
1938 pc = get_frame_pc (this_frame);
1939 gdbarch *arch = get_frame_arch (this_frame);
1940 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (arch);
1941 if (pc <= tdep->lowest_pc)
1942 return UNWIND_OUTERMOST;
1943
1944 /* If we've hit a wall, stop. */
1945 if (cache->prev_sp == 0)
1946 return UNWIND_OUTERMOST;
1947
1948 return UNWIND_NO_REASON;
1949 }
1950
1951 /* Our frame ID for a normal frame is the current function's starting PC
1952 and the caller's SP when we were called. */
1953
1954 static void
1955 arm_prologue_this_id (struct frame_info *this_frame,
1956 void **this_cache,
1957 struct frame_id *this_id)
1958 {
1959 struct arm_prologue_cache *cache;
1960 struct frame_id id;
1961 CORE_ADDR pc, func;
1962
1963 if (*this_cache == NULL)
1964 *this_cache = arm_make_prologue_cache (this_frame);
1965 cache = (struct arm_prologue_cache *) *this_cache;
1966
1967 /* Use function start address as part of the frame ID. If we cannot
1968 identify the start address (due to missing symbol information),
1969 fall back to just using the current PC. */
1970 pc = get_frame_pc (this_frame);
1971 func = get_frame_func (this_frame);
1972 if (!func)
1973 func = pc;
1974
1975 id = frame_id_build (cache->prev_sp, func);
1976 *this_id = id;
1977 }
1978
1979 static struct value *
1980 arm_prologue_prev_register (struct frame_info *this_frame,
1981 void **this_cache,
1982 int prev_regnum)
1983 {
1984 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1985 struct arm_prologue_cache *cache;
1986
1987 if (*this_cache == NULL)
1988 *this_cache = arm_make_prologue_cache (this_frame);
1989 cache = (struct arm_prologue_cache *) *this_cache;
1990
1991 /* If we are asked to unwind the PC, then we need to return the LR
1992 instead. The prologue may save PC, but it will point into this
1993 frame's prologue, not the next frame's resume location. Also
1994 strip the saved T bit. A valid LR may have the low bit set, but
1995 a valid PC never does. */
1996 if (prev_regnum == ARM_PC_REGNUM)
1997 {
1998 CORE_ADDR lr;
1999
2000 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2001 return frame_unwind_got_constant (this_frame, prev_regnum,
2002 arm_addr_bits_remove (gdbarch, lr));
2003 }
2004
2005 /* SP is generally not saved to the stack, but this frame is
2006 identified by the next frame's stack pointer at the time of the call.
2007 The value was already reconstructed into PREV_SP. */
2008 if (prev_regnum == ARM_SP_REGNUM)
2009 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2010
2011 /* The CPSR may have been changed by the call instruction and by the
2012 called function. The only bit we can reconstruct is the T bit,
2013 by checking the low bit of LR as of the call. This is a reliable
2014 indicator of Thumb-ness except for some ARM v4T pre-interworking
2015 Thumb code, which could get away with a clear low bit as long as
2016 the called function did not use bx. Guess that all other
2017 bits are unchanged; the condition flags are presumably lost,
2018 but the processor status is likely valid. */
2019 if (prev_regnum == ARM_PS_REGNUM)
2020 {
2021 CORE_ADDR lr, cpsr;
2022 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2023
2024 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2025 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2026 if (IS_THUMB_ADDR (lr))
2027 cpsr |= t_bit;
2028 else
2029 cpsr &= ~t_bit;
2030 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2031 }
2032
2033 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2034 prev_regnum);
2035 }
2036
2037 static frame_unwind arm_prologue_unwind = {
2038 "arm prologue",
2039 NORMAL_FRAME,
2040 arm_prologue_unwind_stop_reason,
2041 arm_prologue_this_id,
2042 arm_prologue_prev_register,
2043 NULL,
2044 default_frame_sniffer
2045 };
2046
2047 /* Maintain a list of ARM exception table entries per objfile, similar to the
2048 list of mapping symbols. We only cache entries for standard ARM-defined
2049 personality routines; the cache will contain only the frame unwinding
2050 instructions associated with the entry (not the descriptors). */
2051
2052 struct arm_exidx_entry
2053 {
2054 CORE_ADDR addr;
2055 gdb_byte *entry;
2056
2057 bool operator< (const arm_exidx_entry &other) const
2058 {
2059 return addr < other.addr;
2060 }
2061 };
2062
2063 struct arm_exidx_data
2064 {
2065 std::vector<std::vector<arm_exidx_entry>> section_maps;
2066 };
2067
2068 /* Per-BFD key to store exception handling information. */
2069 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2070
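/* Return the obj_section of OBJFILE whose allocated BFD section contains
   the address VMA, or NULL if no such section exists.  */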
2071 static struct obj_section *
2072 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2073 {
2074 struct obj_section *osect;
2075
2076 ALL_OBJFILE_OSECTIONS (objfile, osect)
2077 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2078 {
2079 bfd_vma start, size;
2080 start = bfd_section_vma (osect->the_bfd_section);
2081 size = bfd_section_size (osect->the_bfd_section);
2082
2083 if (start <= vma && vma < start + size)
2084 return osect;
2085 }
2086
2087 return NULL;
2088 }
2089
2090 /* Parse contents of exception table and exception index sections
2091 of OBJFILE, and fill in the exception table entry cache.
2092
2093 For each entry that refers to a standard ARM-defined personality
2094 routine, extract the frame unwinding instructions (from either
2095 the index or the table section). The unwinding instructions
2096 are normalized by:
2097 - extracting them from the rest of the table data
2098 - converting to host endianness
2099 - appending the implicit 0xb0 ("Finish") code
2100
2101 The extracted and normalized instructions are stored for later
2102 retrieval by the arm_find_exidx_entry routine. */
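/* For illustration, each .ARM.exidx entry is a pair of 32-bit words:

     word 0: a prel31 offset to the start of the function covered;
     word 1: either the value 1 (EXIDX_CANTUNWIND), or an inline table
	     entry with bit 31 set (high byte 0x80 -- the "short form"
	     handled below), or a prel31 offset into .ARM.extab where
	     the table entry resides (bit 31 clear).

   This mirrors the decoding performed in the loop below; see the
   "Exception Handling ABI for the ARM Architecture" document for the
   authoritative description.  */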
2103
2104 static void
2105 arm_exidx_new_objfile (struct objfile *objfile)
2106 {
2107 struct arm_exidx_data *data;
2108 asection *exidx, *extab;
2109 bfd_vma exidx_vma = 0, extab_vma = 0;
2110 LONGEST i;
2111
2112 /* If we've already touched this file, do nothing. */
2113 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2114 return;
2115
2116 /* Read contents of exception table and index. */
2117 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2118 gdb::byte_vector exidx_data;
2119 if (exidx)
2120 {
2121 exidx_vma = bfd_section_vma (exidx);
2122 exidx_data.resize (bfd_section_size (exidx));
2123
2124 if (!bfd_get_section_contents (objfile->obfd, exidx,
2125 exidx_data.data (), 0,
2126 exidx_data.size ()))
2127 return;
2128 }
2129
2130 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2131 gdb::byte_vector extab_data;
2132 if (extab)
2133 {
2134 extab_vma = bfd_section_vma (extab);
2135 extab_data.resize (bfd_section_size (extab));
2136
2137 if (!bfd_get_section_contents (objfile->obfd, extab,
2138 extab_data.data (), 0,
2139 extab_data.size ()))
2140 return;
2141 }
2142
2143 /* Allocate exception table data structure. */
2144 data = arm_exidx_data_key.emplace (objfile->obfd);
2145 data->section_maps.resize (objfile->obfd->section_count);
2146
2147 /* Fill in exception table. */
2148 for (i = 0; i < exidx_data.size () / 8; i++)
2149 {
2150 struct arm_exidx_entry new_exidx_entry;
2151 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2152 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2153 exidx_data.data () + i * 8 + 4);
2154 bfd_vma addr = 0, word = 0;
2155 int n_bytes = 0, n_words = 0;
2156 struct obj_section *sec;
2157 gdb_byte *entry = NULL;
2158
2159 /* Extract address of start of function. */
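	  /* The first word is a prel31 value: a 31-bit signed offset,
	     stored in bits[30:0] and relative to the address of the word
	     itself.  The expression below sign-extends it from bit 30;
	     for example, a stored value of 0x7ffffffc decodes to -4
	     before the entry's own address is added back in.  */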
2160 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2161 idx += exidx_vma + i * 8;
2162
2163 /* Find section containing function and compute section offset. */
2164 sec = arm_obj_section_from_vma (objfile, idx);
2165 if (sec == NULL)
2166 continue;
2167 idx -= bfd_section_vma (sec->the_bfd_section);
2168
2169 /* Determine address of exception table entry. */
2170 if (val == 1)
2171 {
2172 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2173 }
2174 else if ((val & 0xff000000) == 0x80000000)
2175 {
2176 /* Exception table entry embedded in .ARM.exidx
2177 -- must be short form. */
2178 word = val;
2179 n_bytes = 3;
2180 }
2181 else if (!(val & 0x80000000))
2182 {
2183 /* Exception table entry in .ARM.extab. */
2184 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2185 addr += exidx_vma + i * 8 + 4;
2186
2187 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2188 {
2189 word = bfd_h_get_32 (objfile->obfd,
2190 extab_data.data () + addr - extab_vma);
2191 addr += 4;
2192
2193 if ((word & 0xff000000) == 0x80000000)
2194 {
2195 /* Short form. */
2196 n_bytes = 3;
2197 }
2198 else if ((word & 0xff000000) == 0x81000000
2199 || (word & 0xff000000) == 0x82000000)
2200 {
2201 /* Long form. */
2202 n_bytes = 2;
2203 n_words = ((word >> 16) & 0xff);
2204 }
2205 else if (!(word & 0x80000000))
2206 {
2207 bfd_vma pers;
2208 struct obj_section *pers_sec;
2209 int gnu_personality = 0;
2210
2211 /* Custom personality routine. */
2212 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2213 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2214
2215 /* Check whether we've got one of the variants of the
2216 GNU personality routines. */
2217 pers_sec = arm_obj_section_from_vma (objfile, pers);
2218 if (pers_sec)
2219 {
2220 static const char *personality[] =
2221 {
2222 "__gcc_personality_v0",
2223 "__gxx_personality_v0",
2224 "__gcj_personality_v0",
2225 "__gnu_objc_personality_v0",
2226 NULL
2227 };
2228
2229 CORE_ADDR pc = pers + pers_sec->offset ();
2230 int k;
2231
2232 for (k = 0; personality[k]; k++)
2233 if (lookup_minimal_symbol_by_pc_name
2234 (pc, personality[k], objfile))
2235 {
2236 gnu_personality = 1;
2237 break;
2238 }
2239 }
2240
2241 /* If so, the next word contains a word count in the high
2242 byte, followed by the same unwind instructions as the
2243 pre-defined forms. */
2244 if (gnu_personality
2245 && addr + 4 <= extab_vma + extab_data.size ())
2246 {
2247 word = bfd_h_get_32 (objfile->obfd,
2248 (extab_data.data ()
2249 + addr - extab_vma));
2250 addr += 4;
2251 n_bytes = 3;
2252 n_words = ((word >> 24) & 0xff);
2253 }
2254 }
2255 }
2256 }
2257
2258 /* Sanity check address. */
2259 if (n_words)
2260 if (addr < extab_vma
2261 || addr + 4 * n_words > extab_vma + extab_data.size ())
2262 n_words = n_bytes = 0;
2263
2264 /* The unwind instructions reside in WORD (only the N_BYTES least
2265 significant bytes are valid), followed by N_WORDS words in the
2266 extab section starting at ADDR. */
2267 if (n_bytes || n_words)
2268 {
2269 gdb_byte *p = entry
2270 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2271 n_bytes + n_words * 4 + 1);
2272
2273 while (n_bytes--)
2274 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2275
2276 while (n_words--)
2277 {
2278 word = bfd_h_get_32 (objfile->obfd,
2279 extab_data.data () + addr - extab_vma);
2280 addr += 4;
2281
2282 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2283 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2284 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2285 *p++ = (gdb_byte) (word & 0xff);
2286 }
2287
2288 /* Implied "Finish" to terminate the list. */
2289 *p++ = 0xb0;
2290 }
2291
2292 	  /* Push the entry onto the vector.  Entries are guaranteed to
2293 	     appear in order of increasing address.  */
2294 new_exidx_entry.addr = idx;
2295 new_exidx_entry.entry = entry;
2296 data->section_maps[sec->the_bfd_section->index].push_back
2297 (new_exidx_entry);
2298 }
2299 }
2300
2301 /* Search for the exception table entry covering MEMADDR. If one is found,
2302 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2303 set *START to the start of the region covered by this entry. */
2304
2305 static gdb_byte *
2306 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2307 {
2308 struct obj_section *sec;
2309
2310 sec = find_pc_section (memaddr);
2311 if (sec != NULL)
2312 {
2313 struct arm_exidx_data *data;
2314 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2315
2316 data = arm_exidx_data_key.get (sec->objfile->obfd);
2317 if (data != NULL)
2318 {
2319 std::vector<arm_exidx_entry> &map
2320 = data->section_maps[sec->the_bfd_section->index];
2321 if (!map.empty ())
2322 {
2323 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2324
2325 /* std::lower_bound finds the earliest ordered insertion
2326 point. If the following symbol starts at this exact
2327 address, we use that; otherwise, the preceding
2328 exception table entry covers this address. */
2329 if (idx < map.end ())
2330 {
2331 if (idx->addr == map_key.addr)
2332 {
2333 if (start)
2334 *start = idx->addr + sec->addr ();
2335 return idx->entry;
2336 }
2337 }
2338
2339 if (idx > map.begin ())
2340 {
2341 idx = idx - 1;
2342 if (start)
2343 *start = idx->addr + sec->addr ();
2344 return idx->entry;
2345 }
2346 }
2347 }
2348 }
2349
2350 return NULL;
2351 }
2352
2353 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2354 instruction list from the ARM exception table entry ENTRY, allocate and
2355 return a prologue cache structure describing how to unwind this frame.
2356
2357 Return NULL if the unwinding instruction list contains a "spare",
2358 "reserved" or "refuse to unwind" instruction as defined in section
2359 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2360 for the ARM Architecture" document. */
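/* For illustration (not taken from any particular binary), a typical entry
   might contain the byte sequence { 0xa8, 0xb0 }: opcode 0xa8 pops
   { r4, lr } -- recording r4 at VSP and LR at VSP + 4, then advancing VSP
   by 8 -- and 0xb0 ("Finish") makes the saved PC come from the saved LR
   and ends the decoding.  These are exactly the steps taken by the loop
   below for those opcodes.  */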
2361
2362 static struct arm_prologue_cache *
2363 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2364 {
2365 CORE_ADDR vsp = 0;
2366 int vsp_valid = 0;
2367
2368 struct arm_prologue_cache *cache;
2369 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2370 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2371
2372 for (;;)
2373 {
2374 gdb_byte insn;
2375
2376       /* Whenever we reload SP, we have to retrieve its actual
2377 	 value in the current frame.  */
2378 if (!vsp_valid)
2379 {
2380 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2381 {
2382 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2383 vsp = get_frame_register_unsigned (this_frame, reg);
2384 }
2385 else
2386 {
2387 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2388 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2389 }
2390
2391 vsp_valid = 1;
2392 }
2393
2394 /* Decode next unwind instruction. */
2395 insn = *entry++;
2396
2397 if ((insn & 0xc0) == 0)
2398 {
2399 int offset = insn & 0x3f;
2400 vsp += (offset << 2) + 4;
2401 }
2402 else if ((insn & 0xc0) == 0x40)
2403 {
2404 int offset = insn & 0x3f;
2405 vsp -= (offset << 2) + 4;
2406 }
2407 else if ((insn & 0xf0) == 0x80)
2408 {
2409 int mask = ((insn & 0xf) << 8) | *entry++;
2410 int i;
2411
2412 /* The special case of an all-zero mask identifies
2413 "Refuse to unwind". We return NULL to fall back
2414 to the prologue analyzer. */
2415 if (mask == 0)
2416 return NULL;
2417
2418 /* Pop registers r4..r15 under mask. */
2419 for (i = 0; i < 12; i++)
2420 if (mask & (1 << i))
2421 {
2422 cache->saved_regs[4 + i].set_addr (vsp);
2423 vsp += 4;
2424 }
2425
2426 /* Special-case popping SP -- we need to reload vsp. */
2427 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2428 vsp_valid = 0;
2429 }
2430 else if ((insn & 0xf0) == 0x90)
2431 {
2432 int reg = insn & 0xf;
2433
2434 /* Reserved cases. */
2435 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2436 return NULL;
2437
2438 /* Set SP from another register and mark VSP for reload. */
2439 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2440 vsp_valid = 0;
2441 }
2442 else if ((insn & 0xf0) == 0xa0)
2443 {
2444 int count = insn & 0x7;
2445 int pop_lr = (insn & 0x8) != 0;
2446 int i;
2447
2448 /* Pop r4..r[4+count]. */
2449 for (i = 0; i <= count; i++)
2450 {
2451 cache->saved_regs[4 + i].set_addr (vsp);
2452 vsp += 4;
2453 }
2454
2455 /* If indicated by flag, pop LR as well. */
2456 if (pop_lr)
2457 {
2458 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2459 vsp += 4;
2460 }
2461 }
2462 else if (insn == 0xb0)
2463 {
2464 	  /* We could only have updated PC by popping into it; if so, it
2465 	     will show up as an address.  Otherwise, copy LR into PC.  */
2466 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2467 cache->saved_regs[ARM_PC_REGNUM]
2468 = cache->saved_regs[ARM_LR_REGNUM];
2469
2470 /* We're done. */
2471 break;
2472 }
2473 else if (insn == 0xb1)
2474 {
2475 int mask = *entry++;
2476 int i;
2477
2478 	  /* An all-zero mask or a mask >= 16 is "spare".  */
2479 if (mask == 0 || mask >= 16)
2480 return NULL;
2481
2482 /* Pop r0..r3 under mask. */
2483 for (i = 0; i < 4; i++)
2484 if (mask & (1 << i))
2485 {
2486 cache->saved_regs[i].set_addr (vsp);
2487 vsp += 4;
2488 }
2489 }
2490 else if (insn == 0xb2)
2491 {
2492 ULONGEST offset = 0;
2493 unsigned shift = 0;
2494
2495 do
2496 {
2497 offset |= (*entry & 0x7f) << shift;
2498 shift += 7;
2499 }
2500 while (*entry++ & 0x80);
2501
2502 vsp += 0x204 + (offset << 2);
2503 }
2504 else if (insn == 0xb3)
2505 {
2506 int start = *entry >> 4;
2507 int count = (*entry++) & 0xf;
2508 int i;
2509
2510 /* Only registers D0..D15 are valid here. */
2511 if (start + count >= 16)
2512 return NULL;
2513
2514 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2515 for (i = 0; i <= count; i++)
2516 {
2517 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2518 vsp += 8;
2519 }
2520
2521 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2522 vsp += 4;
2523 }
2524 else if ((insn & 0xf8) == 0xb8)
2525 {
2526 int count = insn & 0x7;
2527 int i;
2528
2529 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2530 for (i = 0; i <= count; i++)
2531 {
2532 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2533 vsp += 8;
2534 }
2535
2536 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2537 vsp += 4;
2538 }
2539 else if (insn == 0xc6)
2540 {
2541 int start = *entry >> 4;
2542 int count = (*entry++) & 0xf;
2543 int i;
2544
2545 /* Only registers WR0..WR15 are valid. */
2546 if (start + count >= 16)
2547 return NULL;
2548
2549 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2550 for (i = 0; i <= count; i++)
2551 {
2552 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2553 vsp += 8;
2554 }
2555 }
2556 else if (insn == 0xc7)
2557 {
2558 int mask = *entry++;
2559 int i;
2560
2561 	  /* An all-zero mask or a mask >= 16 is "spare".  */
2562 if (mask == 0 || mask >= 16)
2563 return NULL;
2564
2565 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2566 for (i = 0; i < 4; i++)
2567 if (mask & (1 << i))
2568 {
2569 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2570 vsp += 4;
2571 }
2572 }
2573 else if ((insn & 0xf8) == 0xc0)
2574 {
2575 int count = insn & 0x7;
2576 int i;
2577
2578 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2579 for (i = 0; i <= count; i++)
2580 {
2581 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2582 vsp += 8;
2583 }
2584 }
2585 else if (insn == 0xc8)
2586 {
2587 int start = *entry >> 4;
2588 int count = (*entry++) & 0xf;
2589 int i;
2590
2591 /* Only registers D0..D31 are valid. */
2592 if (start + count >= 16)
2593 return NULL;
2594
2595 /* Pop VFP double-precision registers
2596 D[16+start]..D[16+start+count]. */
2597 for (i = 0; i <= count; i++)
2598 {
2599 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2600 vsp += 8;
2601 }
2602 }
2603 else if (insn == 0xc9)
2604 {
2605 int start = *entry >> 4;
2606 int count = (*entry++) & 0xf;
2607 int i;
2608
2609 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2610 for (i = 0; i <= count; i++)
2611 {
2612 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2613 vsp += 8;
2614 }
2615 }
2616 else if ((insn & 0xf8) == 0xd0)
2617 {
2618 int count = insn & 0x7;
2619 int i;
2620
2621 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2622 for (i = 0; i <= count; i++)
2623 {
2624 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2625 vsp += 8;
2626 }
2627 }
2628 else
2629 {
2630 /* Everything else is "spare". */
2631 return NULL;
2632 }
2633 }
2634
2635 /* If we restore SP from a register, assume this was the frame register.
2636 Otherwise just fall back to SP as frame register. */
2637 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2638 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2639 else
2640 cache->framereg = ARM_SP_REGNUM;
2641
2642 /* Determine offset to previous frame. */
2643 cache->framesize
2644 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2645
2646 /* We already got the previous SP. */
2647 cache->prev_sp = vsp;
2648
2649 return cache;
2650 }
2651
2652 /* Unwinding via ARM exception table entries. Note that the sniffer
2653 already computes a filled-in prologue cache, which is then used
2654 with the same arm_prologue_this_id and arm_prologue_prev_register
2655 routines also used for prologue-parsing based unwinding. */
2656
2657 static int
2658 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2659 struct frame_info *this_frame,
2660 void **this_prologue_cache)
2661 {
2662 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2663 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2664 CORE_ADDR addr_in_block, exidx_region, func_start;
2665 struct arm_prologue_cache *cache;
2666 gdb_byte *entry;
2667
2668 /* See if we have an ARM exception table entry covering this address. */
2669 addr_in_block = get_frame_address_in_block (this_frame);
2670 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2671 if (!entry)
2672 return 0;
2673
2674 /* The ARM exception table does not describe unwind information
2675 for arbitrary PC values, but is guaranteed to be correct only
2676 at call sites. We have to decide here whether we want to use
2677 ARM exception table information for this frame, or fall back
2678 to using prologue parsing. (Note that if we have DWARF CFI,
2679 this sniffer isn't even called -- CFI is always preferred.)
2680
2681 Before we make this decision, however, we check whether we
2682 actually have *symbol* information for the current frame.
2683 If not, prologue parsing would not work anyway, so we might
2684 as well use the exception table and hope for the best. */
2685 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2686 {
2687 int exc_valid = 0;
2688
2689 /* If the next frame is "normal", we are at a call site in this
2690 frame, so exception information is guaranteed to be valid. */
2691 if (get_next_frame (this_frame)
2692 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2693 exc_valid = 1;
2694
2695 /* We also assume exception information is valid if we're currently
2696 blocked in a system call. The system library is supposed to
2697 ensure this, so that e.g. pthread cancellation works. */
2698 if (arm_frame_is_thumb (this_frame))
2699 {
2700 ULONGEST insn;
2701
2702 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2703 2, byte_order_for_code, &insn)
2704 && (insn & 0xff00) == 0xdf00 /* svc */)
2705 exc_valid = 1;
2706 }
2707 else
2708 {
2709 ULONGEST insn;
2710
2711 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2712 4, byte_order_for_code, &insn)
2713 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2714 exc_valid = 1;
2715 }
2716
2717 /* Bail out if we don't know that exception information is valid. */
2718 if (!exc_valid)
2719 return 0;
2720
2721 /* The ARM exception index does not mark the *end* of the region
2722 covered by the entry, and some functions will not have any entry.
2723 To correctly recognize the end of the covered region, the linker
2724 should have inserted dummy records with a CANTUNWIND marker.
2725
2726 Unfortunately, current versions of GNU ld do not reliably do
2727 this, and thus we may have found an incorrect entry above.
2728 As a (temporary) sanity check, we only use the entry if it
2729 lies *within* the bounds of the function. Note that this check
2730 might reject perfectly valid entries that just happen to cover
2731 multiple functions; therefore this check ought to be removed
2732 once the linker is fixed. */
2733 if (func_start > exidx_region)
2734 return 0;
2735 }
2736
2737 /* Decode the list of unwinding instructions into a prologue cache.
2738 Note that this may fail due to e.g. a "refuse to unwind" code. */
2739 cache = arm_exidx_fill_cache (this_frame, entry);
2740 if (!cache)
2741 return 0;
2742
2743 *this_prologue_cache = cache;
2744 return 1;
2745 }
2746
2747 struct frame_unwind arm_exidx_unwind = {
2748 "arm exidx",
2749 NORMAL_FRAME,
2750 default_frame_unwind_stop_reason,
2751 arm_prologue_this_id,
2752 arm_prologue_prev_register,
2753 NULL,
2754 arm_exidx_unwind_sniffer
2755 };
2756
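/* Allocate and fill in a prologue cache for THIS_FRAME when it is stopped
   in the function's epilogue: reuse the prologue scan for the saved
   register offsets, but take PREV_SP directly from the already-restored
   stack pointer.  */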
2757 static struct arm_prologue_cache *
2758 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2759 {
2760 struct arm_prologue_cache *cache;
2761 int reg;
2762
2763 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2764 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2765
2766   /* Still rely on the offsets calculated from the prologue.  */
2767 arm_scan_prologue (this_frame, cache);
2768
2769   /* Since we are in the epilogue, the SP has been restored.  */
2770 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2771
2772 /* Calculate actual addresses of saved registers using offsets
2773 determined by arm_scan_prologue. */
2774 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2775 if (cache->saved_regs[reg].is_addr ())
2776 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2777 + cache->prev_sp);
2778
2779 return cache;
2780 }
2781
2782 /* Implementation of function hook 'this_id' in
2783    'struct frame_unwind' for the epilogue unwinder.  */
2784
2785 static void
2786 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2787 void **this_cache,
2788 struct frame_id *this_id)
2789 {
2790 struct arm_prologue_cache *cache;
2791 CORE_ADDR pc, func;
2792
2793 if (*this_cache == NULL)
2794 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2795 cache = (struct arm_prologue_cache *) *this_cache;
2796
2797 /* Use function start address as part of the frame ID. If we cannot
2798 identify the start address (due to missing symbol information),
2799 fall back to just using the current PC. */
2800 pc = get_frame_pc (this_frame);
2801 func = get_frame_func (this_frame);
2802 if (func == 0)
2803 func = pc;
2804
2805 (*this_id) = frame_id_build (cache->prev_sp, pc);
2806 }
2807
2808 /* Implementation of function hook 'prev_register' in
2809    'struct frame_unwind' for the epilogue unwinder.  */
2810
2811 static struct value *
2812 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2813 void **this_cache, int regnum)
2814 {
2815 if (*this_cache == NULL)
2816 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2817
2818 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2819 }
2820
2821 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2822 CORE_ADDR pc);
2823 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2824 CORE_ADDR pc);
2825
2826 /* Implementation of function hook 'sniffer' in
2827    'struct frame_unwind' for the epilogue unwinder.  */
2828
2829 static int
2830 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2831 struct frame_info *this_frame,
2832 void **this_prologue_cache)
2833 {
2834 if (frame_relative_level (this_frame) == 0)
2835 {
2836 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2837 CORE_ADDR pc = get_frame_pc (this_frame);
2838
2839 if (arm_frame_is_thumb (this_frame))
2840 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2841 else
2842 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2843 }
2844 else
2845 return 0;
2846 }
2847
2848 /* Frame unwinder for the epilogue.  */
2849
2850 static const struct frame_unwind arm_epilogue_frame_unwind =
2851 {
2852 "arm epilogue",
2853 NORMAL_FRAME,
2854 default_frame_unwind_stop_reason,
2855 arm_epilogue_frame_this_id,
2856 arm_epilogue_frame_prev_register,
2857 NULL,
2858 arm_epilogue_frame_sniffer,
2859 };
2860
2861 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2862 trampoline, return the target PC. Otherwise return 0.
2863
2864 void call0a (char c, short s, int i, long l) {}
2865
2866 int main (void)
2867 {
2868 (*pointer_to_call0a) (c, s, i, l);
2869 }
2870
2871 Instead of calling a stub library function _call_via_xx (xx is
2872 the register name), GCC may inline the trampoline in the object
2873 file as below (register r2 has the address of call0a).
2874
2875 .global main
2876 .type main, %function
2877 ...
2878 bl .L1
2879 ...
2880 .size main, .-main
2881
2882 .L1:
2883 bx r2
2884
2885 The trampoline 'bx r2' doesn't belong to main. */
2886
2887 static CORE_ADDR
2888 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2889 {
2890   /* The heuristic for recognizing such a trampoline is that FRAME is
2891      executing in Thumb mode and the instruction at PC is 'bx Rm'.  */
2892 if (arm_frame_is_thumb (frame))
2893 {
2894 gdb_byte buf[2];
2895
2896 if (target_read_memory (pc, buf, 2) == 0)
2897 {
2898 struct gdbarch *gdbarch = get_frame_arch (frame);
2899 enum bfd_endian byte_order_for_code
2900 = gdbarch_byte_order_for_code (gdbarch);
2901 uint16_t insn
2902 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2903
2904 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2905 {
2906 CORE_ADDR dest
2907 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2908
2909 /* Clear the LSB so that gdb core sets step-resume
2910 breakpoint at the right address. */
2911 return UNMAKE_THUMB_ADDR (dest);
2912 }
2913 }
2914 }
2915
2916 return 0;
2917 }
2918
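/* Allocate a minimal prologue cache for a stub frame (e.g. a PLT entry or
   code in unreadable memory).  Only PREV_SP is filled in; it is simply the
   current value of SP, on the assumption that such stubs do not adjust the
   stack.  */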
2919 static struct arm_prologue_cache *
2920 arm_make_stub_cache (struct frame_info *this_frame)
2921 {
2922 struct arm_prologue_cache *cache;
2923
2924 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2925 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2926
2927 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2928
2929 return cache;
2930 }
2931
2932 /* Our frame ID for a stub frame is the current SP and LR. */
2933
2934 static void
2935 arm_stub_this_id (struct frame_info *this_frame,
2936 void **this_cache,
2937 struct frame_id *this_id)
2938 {
2939 struct arm_prologue_cache *cache;
2940
2941 if (*this_cache == NULL)
2942 *this_cache = arm_make_stub_cache (this_frame);
2943 cache = (struct arm_prologue_cache *) *this_cache;
2944
2945 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2946 }
2947
2948 static int
2949 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2950 struct frame_info *this_frame,
2951 void **this_prologue_cache)
2952 {
2953 CORE_ADDR addr_in_block;
2954 gdb_byte dummy[4];
2955 CORE_ADDR pc, start_addr;
2956 const char *name;
2957
2958 addr_in_block = get_frame_address_in_block (this_frame);
2959 pc = get_frame_pc (this_frame);
2960 if (in_plt_section (addr_in_block)
2961       /* We also use the stub unwinder if the target memory is unreadable,
2962 	 to avoid having the prologue unwinder try to read it.  */
2963 || target_read_memory (pc, dummy, 4) != 0)
2964 return 1;
2965
2966 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2967 && arm_skip_bx_reg (this_frame, pc) != 0)
2968 return 1;
2969
2970 return 0;
2971 }
2972
2973 struct frame_unwind arm_stub_unwind = {
2974 "arm stub",
2975 NORMAL_FRAME,
2976 default_frame_unwind_stop_reason,
2977 arm_stub_this_id,
2978 arm_prologue_prev_register,
2979 NULL,
2980 arm_stub_unwind_sniffer
2981 };
2982
2983 /* Store, into CACHE->saved_regs, the addresses of the saved
2984    registers of the frame described by THIS_FRAME.  CACHE is
2985    returned.  */
2986
2987 static struct arm_prologue_cache *
2988 arm_m_exception_cache (struct frame_info *this_frame)
2989 {
2990 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2991 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2992 struct arm_prologue_cache *cache;
2993 CORE_ADDR lr;
2994 CORE_ADDR sp;
2995 CORE_ADDR unwound_sp;
2996 LONGEST xpsr;
2997 uint32_t exc_return;
2998 uint32_t process_stack_used;
2999 uint32_t extended_frame_used;
3000 uint32_t secure_stack_used;
3001
3002 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3003 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3004
3005   /* The ARMv7-M Architecture Reference Manual, "B1.5.6 Exception entry behavior",
3006      describes which bits in LR define which stack was used prior to the
3007      exception and whether the FPU was in use (causing an extended stack frame).  */
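  /* In particular, the code below relies on three EXC_RETURN bits
     (assuming an EXC_RETURN value, i.e. bits[31:28] == 0xf):

	bit 2 (SPSEL): set if the Thread (process) stack, PSP, was in use;
	bit 4 (FTYPE): clear if an extended (FPU) stack frame was pushed;
	bit 6 (S):     set if the Secure stack was in use (ARMv8-M).  */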
3008
3009 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3010 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3011
3012 /* Check EXC_RETURN indicator bits. */
3013 exc_return = (((lr >> 28) & 0xf) == 0xf);
3014
3015   /* Check EXC_RETURN bit SPSEL to see whether the Main or the Thread (process) stack was used.  */
3016 process_stack_used = ((lr & (1 << 2)) != 0);
3017 if (exc_return && process_stack_used)
3018 {
3019       /* Thread (process) stack was used.
3020 	 Potentially this could be another register defined by the target, but
3021 	 PSP can be considered a standard name for the "Process Stack Pointer".
3022 	 To be fully aware of system registers like MSP and PSP, these could
3023 	 be added to a separate XML arm-m-system-profile that is valid for the
3024 	 ARMv6-M and ARMv7-M architectures.  Also, to be able to debug e.g. a
3025 	 corefile off-line, these registers must be defined by GDB
3026 	 and also be included in the corefile regsets.  */
3027
3028 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
3029 if (psp_regnum == -1)
3030 {
3031 	  /* The thread (process) stack could not be fetched;
3032 	     give a warning and exit.  */
3033
3034 warning (_("no PSP thread stack unwinding supported."));
3035
3036 	  /* Terminate any further stack unwinding by making this frame refer to itself (PREV_SP == SP).  */
3037 cache->prev_sp = sp;
3038 return cache;
3039 }
3040 else
3041 {
3042 /* Thread (process) stack used, use PSP as SP. */
3043 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
3044 }
3045 }
3046 else
3047 {
3048 /* Main stack used, use MSP as SP. */
3049 unwound_sp = sp;
3050 }
3051
3052 /* The hardware saves eight 32-bit words, comprising xPSR,
3053 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3054 "B1.5.6 Exception entry behavior" in
3055 "ARMv7-M Architecture Reference Manual". */
3056 cache->saved_regs[0].set_addr (unwound_sp);
3057 cache->saved_regs[1].set_addr (unwound_sp + 4);
3058 cache->saved_regs[2].set_addr (unwound_sp + 8);
3059 cache->saved_regs[3].set_addr (unwound_sp + 12);
3060 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + 16);
3061 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 20);
3062 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 24);
3063 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 28);
3064
3065   /* Check EXC_RETURN bit FTYPE to see whether an extended stack frame
3066      (with FPU registers stored) was used.  */
3067 extended_frame_used = ((lr & (1 << 4)) == 0);
3068 if (exc_return && extended_frame_used)
3069 {
3070 int i;
3071 int fpu_regs_stack_offset;
3072
3073       /* This code does not take lazy stacking into account; see "Lazy
3074 	 context save of FP state" in B1.5.7 and ARM AN298, as supported
3075 	 by the Cortex-M4F architecture.
3076 	 To fully handle this, the FPCCR register (Floating-point Context
3077 	 Control Register) would need to be read out and its ASPEN and LSPEN
3078 	 bits checked in order to set up the lazily stacked FP registers
3079 	 correctly.  This register is located at address 0xE000EF34.  */
3080
3081 /* Extended stack frame type used. */
3082 fpu_regs_stack_offset = unwound_sp + 0x20;
3083 for (i = 0; i < 16; i++)
3084 {
3085 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3086 fpu_regs_stack_offset += 4;
3087 }
3088 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + 0x60);
3089
3090 /* Offset 0x64 is reserved. */
3091 cache->prev_sp = unwound_sp + 0x68;
3092 }
3093 else
3094 {
3095 /* Standard stack frame type used. */
3096 cache->prev_sp = unwound_sp + 0x20;
3097 }
3098
3099   /* Check EXC_RETURN bit S to see whether the Secure or the Non-secure stack was used.  */
3100 secure_stack_used = ((lr & (1 << 6)) != 0);
3101 if (exc_return && secure_stack_used)
3102 {
3103       /* ARMv8-M exception and interrupt handling is not considered here.
3104 	 In the ARMv8-M architecture, EXC_RETURN bit S also controls whether
3105 	 the Secure or the Non-secure stack was used.  To separate Secure and
3106 	 Non-secure stacks, processors based on the ARMv8-M architecture
3107 	 support four stack pointers: MSP_S, PSP_S, MSP_NS and PSP_NS.
3108 	 In addition, a stack limit feature is provided using stack limit
3109 	 registers (accessible using the MSR and MRS instructions) at
3110 	 Privileged level.  */
3111 }
3112
3113 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3114 aligner between the top of the 32-byte stack frame and the
3115 previous context's stack pointer. */
3116 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3117 && (xpsr & (1 << 9)) != 0)
3118 cache->prev_sp += 4;
3119
3120 return cache;
3121 }
3122
3123 /* Implementation of function hook 'this_id' in
3124    'struct frame_unwind'.  */
3125
3126 static void
3127 arm_m_exception_this_id (struct frame_info *this_frame,
3128 void **this_cache,
3129 struct frame_id *this_id)
3130 {
3131 struct arm_prologue_cache *cache;
3132
3133 if (*this_cache == NULL)
3134 *this_cache = arm_m_exception_cache (this_frame);
3135 cache = (struct arm_prologue_cache *) *this_cache;
3136
3137   /* Our frame ID for an M-profile exception frame is the unwound SP and the current PC.  */
3138 *this_id = frame_id_build (cache->prev_sp,
3139 get_frame_pc (this_frame));
3140 }
3141
3142 /* Implementation of function hook 'prev_register' in
3143    'struct frame_unwind'.  */
3144
3145 static struct value *
3146 arm_m_exception_prev_register (struct frame_info *this_frame,
3147 void **this_cache,
3148 int prev_regnum)
3149 {
3150 struct arm_prologue_cache *cache;
3151
3152 if (*this_cache == NULL)
3153 *this_cache = arm_m_exception_cache (this_frame);
3154 cache = (struct arm_prologue_cache *) *this_cache;
3155
3156 /* The value was already reconstructed into PREV_SP. */
3157 if (prev_regnum == ARM_SP_REGNUM)
3158 return frame_unwind_got_constant (this_frame, prev_regnum,
3159 cache->prev_sp);
3160
3161 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3162 prev_regnum);
3163 }
3164
3165 /* Implementation of function hook 'sniffer' in
3166    'struct frame_unwind'.  */
3167
3168 static int
3169 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3170 struct frame_info *this_frame,
3171 void **this_prologue_cache)
3172 {
3173 CORE_ADDR this_pc = get_frame_pc (this_frame);
3174
3175 /* No need to check is_m; this sniffer is only registered for
3176 M-profile architectures. */
3177
3178 /* Check if exception frame returns to a magic PC value. */
3179 return arm_m_addr_is_magic (this_pc);
3180 }
3181
3182 /* Frame unwinder for M-profile exceptions. */
3183
3184 struct frame_unwind arm_m_exception_unwind =
3185 {
3186 "arm m exception",
3187 SIGTRAMP_FRAME,
3188 default_frame_unwind_stop_reason,
3189 arm_m_exception_this_id,
3190 arm_m_exception_prev_register,
3191 NULL,
3192 arm_m_exception_unwind_sniffer
3193 };
3194
3195 static CORE_ADDR
3196 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3197 {
3198 struct arm_prologue_cache *cache;
3199
3200 if (*this_cache == NULL)
3201 *this_cache = arm_make_prologue_cache (this_frame);
3202 cache = (struct arm_prologue_cache *) *this_cache;
3203
3204 return cache->prev_sp - cache->framesize;
3205 }
3206
3207 struct frame_base arm_normal_base = {
3208 &arm_prologue_unwind,
3209 arm_normal_frame_base,
3210 arm_normal_frame_base,
3211 arm_normal_frame_base
3212 };
3213
3214 static struct value *
3215 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3216 int regnum)
3217 {
3218 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3219 CORE_ADDR lr, cpsr;
3220 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3221
3222 switch (regnum)
3223 {
3224 case ARM_PC_REGNUM:
3225 /* The PC is normally copied from the return column, which
3226 describes saves of LR. However, that version may have an
3227 extra bit set to indicate Thumb state. The bit is not
3228 part of the PC. */
3229 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3230 return frame_unwind_got_constant (this_frame, regnum,
3231 arm_addr_bits_remove (gdbarch, lr));
3232
3233 case ARM_PS_REGNUM:
3234 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3235 cpsr = get_frame_register_unsigned (this_frame, regnum);
3236 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3237 if (IS_THUMB_ADDR (lr))
3238 cpsr |= t_bit;
3239 else
3240 cpsr &= ~t_bit;
3241 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3242
3243 default:
3244 internal_error (__FILE__, __LINE__,
3245 _("Unexpected register %d"), regnum);
3246 }
3247 }
3248
3249 static void
3250 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3251 struct dwarf2_frame_state_reg *reg,
3252 struct frame_info *this_frame)
3253 {
3254 switch (regnum)
3255 {
3256 case ARM_PC_REGNUM:
3257 case ARM_PS_REGNUM:
3258 reg->how = DWARF2_FRAME_REG_FN;
3259 reg->loc.fn = arm_dwarf2_prev_register;
3260 break;
3261 case ARM_SP_REGNUM:
3262 reg->how = DWARF2_FRAME_REG_CFA;
3263 break;
3264 }
3265 }
3266
3267 /* Implement the stack_frame_destroyed_p gdbarch method. */
3268
3269 static int
3270 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3271 {
3272 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3273 unsigned int insn, insn2;
3274 int found_return = 0, found_stack_adjust = 0;
3275 CORE_ADDR func_start, func_end;
3276 CORE_ADDR scan_pc;
3277 gdb_byte buf[4];
3278
3279 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3280 return 0;
3281
3282 /* The epilogue is a sequence of instructions along the following lines:
3283
3284 - add stack frame size to SP or FP
3285 - [if frame pointer used] restore SP from FP
3286 - restore registers from SP [may include PC]
3287 - a return-type instruction [if PC wasn't already restored]
3288
3289 In a first pass, we scan forward from the current PC and verify the
3290 instructions we find as compatible with this sequence, ending in a
3291 return instruction.
3292
3293 However, this is not sufficient to distinguish indirect function calls
3294 within a function from indirect tail calls in the epilogue in some cases.
3295 Therefore, if we didn't already find any SP-changing instruction during
3296 forward scan, we add a backward scanning heuristic to ensure we actually
3297 are in the epilogue. */
3298
3299 scan_pc = pc;
3300 while (scan_pc < func_end && !found_return)
3301 {
3302 if (target_read_memory (scan_pc, buf, 2))
3303 break;
3304
3305 scan_pc += 2;
3306 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3307
3308 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3309 found_return = 1;
3310 else if (insn == 0x46f7) /* mov pc, lr */
3311 found_return = 1;
3312 else if (thumb_instruction_restores_sp (insn))
3313 {
3314 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3315 found_return = 1;
3316 }
3317 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3318 {
3319 if (target_read_memory (scan_pc, buf, 2))
3320 break;
3321
3322 scan_pc += 2;
3323 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3324
3325 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3326 {
3327 if (insn2 & 0x8000) /* <registers> include PC. */
3328 found_return = 1;
3329 }
3330 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3331 && (insn2 & 0x0fff) == 0x0b04)
3332 {
3333 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3334 found_return = 1;
3335 }
3336 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3337 && (insn2 & 0x0e00) == 0x0a00)
3338 ;
3339 else
3340 break;
3341 }
3342 else
3343 break;
3344 }
3345
3346 if (!found_return)
3347 return 0;
3348
3349 /* Since any instruction in the epilogue sequence, with the possible
3350 exception of return itself, updates the stack pointer, we need to
3351 scan backwards for at most one instruction. Try either a 16-bit or
3352 a 32-bit instruction. This is just a heuristic, so we do not worry
3353 too much about false positives. */
3354
3355 if (pc - 4 < func_start)
3356 return 0;
3357 if (target_read_memory (pc - 4, buf, 4))
3358 return 0;
3359
3360 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3361 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3362
3363 if (thumb_instruction_restores_sp (insn2))
3364 found_stack_adjust = 1;
3365 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3366 found_stack_adjust = 1;
3367 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3368 && (insn2 & 0x0fff) == 0x0b04)
3369 found_stack_adjust = 1;
3370 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3371 && (insn2 & 0x0e00) == 0x0a00)
3372 found_stack_adjust = 1;
3373
3374 return found_stack_adjust;
3375 }
3376
3377 static int
3378 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3379 {
3380 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3381 unsigned int insn;
3382 int found_return;
3383 CORE_ADDR func_start, func_end;
3384
3385 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3386 return 0;
3387
3388 /* We are in the epilogue if the previous instruction was a stack
3389 adjustment and the next instruction is a possible return (bx, mov
3390 pc, or pop). We could have to scan backwards to find the stack
3391 adjustment, or forwards to find the return, but this is a decent
3392 approximation. First scan forwards. */
3393
3394 found_return = 0;
3395 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3396 if (bits (insn, 28, 31) != INST_NV)
3397 {
3398 if ((insn & 0x0ffffff0) == 0x012fff10)
3399 /* BX. */
3400 found_return = 1;
3401 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3402 /* MOV PC. */
3403 found_return = 1;
3404 else if ((insn & 0x0fff0000) == 0x08bd0000
3405 && (insn & 0x0000c000) != 0)
3406 /* POP (LDMIA), including PC or LR. */
3407 found_return = 1;
3408 }
3409
3410 if (!found_return)
3411 return 0;
3412
3413 /* Scan backwards. This is just a heuristic, so do not worry about
3414 false positives from mode changes. */
3415
3416 if (pc < func_start + 4)
3417 return 0;
3418
3419 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3420 if (arm_instruction_restores_sp (insn))
3421 return 1;
3422
3423 return 0;
3424 }
3425
3426 /* Implement the stack_frame_destroyed_p gdbarch method. */
3427
3428 static int
3429 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3430 {
3431 if (arm_pc_is_thumb (gdbarch, pc))
3432 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3433 else
3434 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3435 }
3436
3437 /* When arguments must be pushed onto the stack, they go on in reverse
3438 order. The code below implements a FILO (stack) to do this. */
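/* A minimal usage sketch (hypothetical caller, not code from this file):

     struct stack_item *si = NULL;
     si = push_stack_item (si, bytes_a, len_a);   -- pushed first, popped last
     si = push_stack_item (si, bytes_b, len_b);
     while (si != NULL)
       si = pop_stack_item (si);                  -- pops b, then a

   pop_stack_item frees the popped item and returns the new top.  */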
3439
3440 struct stack_item
3441 {
3442 int len;
3443 struct stack_item *prev;
3444 gdb_byte *data;
3445 };
3446
3447 static struct stack_item *
3448 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3449 {
3450 struct stack_item *si;
3451 si = XNEW (struct stack_item);
3452 si->data = (gdb_byte *) xmalloc (len);
3453 si->len = len;
3454 si->prev = prev;
3455 memcpy (si->data, contents, len);
3456 return si;
3457 }
3458
3459 static struct stack_item *
3460 pop_stack_item (struct stack_item *si)
3461 {
3462 struct stack_item *dead = si;
3463 si = si->prev;
3464 xfree (dead->data);
3465 xfree (dead);
3466 return si;
3467 }
3468
3469 /* Implement the gdbarch type alignment method; this overrides the
3470    generic alignment algorithm for anything that is ARM specific.  */
3471
3472 static ULONGEST
3473 arm_type_align (gdbarch *gdbarch, struct type *t)
3474 {
3475 t = check_typedef (t);
3476 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3477 {
3478       /* Use the natural alignment for vector types (the same as for
3479 	 scalar types), but cap the maximum alignment at 64 bits.  */
3480 if (TYPE_LENGTH (t) > 8)
3481 return 8;
3482 else
3483 return TYPE_LENGTH (t);
3484 }
3485
3486 /* Allow the common code to calculate the alignment. */
3487 return 0;
3488 }
3489
3490 /* Possible base types for a candidate for passing and returning in
3491 VFP registers. */
3492
3493 enum arm_vfp_cprc_base_type
3494 {
3495 VFP_CPRC_UNKNOWN,
3496 VFP_CPRC_SINGLE,
3497 VFP_CPRC_DOUBLE,
3498 VFP_CPRC_VEC64,
3499 VFP_CPRC_VEC128
3500 };
3501
3502 /* The length of one element of base type B. */
3503
3504 static unsigned
3505 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3506 {
3507 switch (b)
3508 {
3509 case VFP_CPRC_SINGLE:
3510 return 4;
3511 case VFP_CPRC_DOUBLE:
3512 return 8;
3513 case VFP_CPRC_VEC64:
3514 return 8;
3515 case VFP_CPRC_VEC128:
3516 return 16;
3517 default:
3518 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3519 (int) b);
3520 }
3521 }
3522
3523 /* The character ('s', 'd' or 'q') for the type of VFP register used
3524 for passing base type B. */
3525
3526 static int
3527 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3528 {
3529 switch (b)
3530 {
3531 case VFP_CPRC_SINGLE:
3532 return 's';
3533 case VFP_CPRC_DOUBLE:
3534 return 'd';
3535 case VFP_CPRC_VEC64:
3536 return 'd';
3537 case VFP_CPRC_VEC128:
3538 return 'q';
3539 default:
3540 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3541 (int) b);
3542 }
3543 }
3544
3545 /* Determine whether T may be part of a candidate for passing and
3546 returning in VFP registers, ignoring the limit on the total number
3547 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3548 classification of the first valid component found; if it is not
3549 VFP_CPRC_UNKNOWN, all components must have the same classification
3550 as *BASE_TYPE. If it is found that T contains a type not permitted
3551 for passing and returning in VFP registers, a type differently
3552 classified from *BASE_TYPE, or two types differently classified
3553 from each other, return -1, otherwise return the total number of
3554 base-type elements found (possibly 0 in an empty structure or
3555 array). Vector types are not currently supported, matching the
3556 generic AAPCS support. */
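/* For illustration (hypothetical C types, following the rules coded below):

     struct { float x, y, z; }      yields 3 elements of base type SINGLE;
     _Complex double                yields 2 elements of base type DOUBLE;
     struct { float f; double d; }  yields -1 (mixed base types, not a CPRC).  */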
3557
3558 static int
3559 arm_vfp_cprc_sub_candidate (struct type *t,
3560 enum arm_vfp_cprc_base_type *base_type)
3561 {
3562 t = check_typedef (t);
3563 switch (t->code ())
3564 {
3565 case TYPE_CODE_FLT:
3566 switch (TYPE_LENGTH (t))
3567 {
3568 case 4:
3569 if (*base_type == VFP_CPRC_UNKNOWN)
3570 *base_type = VFP_CPRC_SINGLE;
3571 else if (*base_type != VFP_CPRC_SINGLE)
3572 return -1;
3573 return 1;
3574
3575 case 8:
3576 if (*base_type == VFP_CPRC_UNKNOWN)
3577 *base_type = VFP_CPRC_DOUBLE;
3578 else if (*base_type != VFP_CPRC_DOUBLE)
3579 return -1;
3580 return 1;
3581
3582 default:
3583 return -1;
3584 }
3585 break;
3586
3587 case TYPE_CODE_COMPLEX:
3588 /* Arguments of complex T where T is one of the types float or
3589 double get treated as if they are implemented as:
3590
3591 struct complexT
3592 {
3593 T real;
3594 T imag;
3595 };
3596
3597 */
3598 switch (TYPE_LENGTH (t))
3599 {
3600 case 8:
3601 if (*base_type == VFP_CPRC_UNKNOWN)
3602 *base_type = VFP_CPRC_SINGLE;
3603 else if (*base_type != VFP_CPRC_SINGLE)
3604 return -1;
3605 return 2;
3606
3607 case 16:
3608 if (*base_type == VFP_CPRC_UNKNOWN)
3609 *base_type = VFP_CPRC_DOUBLE;
3610 else if (*base_type != VFP_CPRC_DOUBLE)
3611 return -1;
3612 return 2;
3613
3614 default:
3615 return -1;
3616 }
3617 break;
3618
3619 case TYPE_CODE_ARRAY:
3620 {
3621 if (t->is_vector ())
3622 {
3623 /* 64-bit and 128-bit containerized vector types are VFP
3624 CPRCs. */
3625 switch (TYPE_LENGTH (t))
3626 {
3627 case 8:
3628 if (*base_type == VFP_CPRC_UNKNOWN)
3629 *base_type = VFP_CPRC_VEC64;
3630 return 1;
3631 case 16:
3632 if (*base_type == VFP_CPRC_UNKNOWN)
3633 *base_type = VFP_CPRC_VEC128;
3634 return 1;
3635 default:
3636 return -1;
3637 }
3638 }
3639 else
3640 {
3641 int count;
3642 unsigned unitlen;
3643
3644 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3645 base_type);
3646 if (count == -1)
3647 return -1;
3648 if (TYPE_LENGTH (t) == 0)
3649 {
3650 gdb_assert (count == 0);
3651 return 0;
3652 }
3653 else if (count == 0)
3654 return -1;
3655 unitlen = arm_vfp_cprc_unit_length (*base_type);
3656 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3657 return TYPE_LENGTH (t) / unitlen;
3658 }
3659 }
3660 break;
3661
3662 case TYPE_CODE_STRUCT:
3663 {
3664 int count = 0;
3665 unsigned unitlen;
3666 int i;
3667 for (i = 0; i < t->num_fields (); i++)
3668 {
3669 int sub_count = 0;
3670
3671 if (!field_is_static (&t->field (i)))
3672 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3673 base_type);
3674 if (sub_count == -1)
3675 return -1;
3676 count += sub_count;
3677 }
3678 if (TYPE_LENGTH (t) == 0)
3679 {
3680 gdb_assert (count == 0);
3681 return 0;
3682 }
3683 else if (count == 0)
3684 return -1;
3685 unitlen = arm_vfp_cprc_unit_length (*base_type);
3686 if (TYPE_LENGTH (t) != unitlen * count)
3687 return -1;
3688 return count;
3689 }
3690
3691 case TYPE_CODE_UNION:
3692 {
3693 int count = 0;
3694 unsigned unitlen;
3695 int i;
3696 for (i = 0; i < t->num_fields (); i++)
3697 {
3698 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3699 base_type);
3700 if (sub_count == -1)
3701 return -1;
3702 count = (count > sub_count ? count : sub_count);
3703 }
3704 if (TYPE_LENGTH (t) == 0)
3705 {
3706 gdb_assert (count == 0);
3707 return 0;
3708 }
3709 else if (count == 0)
3710 return -1;
3711 unitlen = arm_vfp_cprc_unit_length (*base_type);
3712 if (TYPE_LENGTH (t) != unitlen * count)
3713 return -1;
3714 return count;
3715 }
3716
3717 default:
3718 break;
3719 }
3720
3721 return -1;
3722 }
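
/* For example: a struct of two floats classifies as VFP_CPRC_SINGLE
   with a count of 2; "double[3]" classifies as VFP_CPRC_DOUBLE with a
   count of 3; and a struct mixing a double and a float yields -1,
   because its members classify differently.  */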
3723
3724 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3725 if passed to or returned from a non-variadic function with the VFP
3726 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3727 *BASE_TYPE to the base type for T and *COUNT to the number of
3728 elements of that base type before returning. */
3729
3730 static int
3731 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3732 int *count)
3733 {
3734 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3735 int c = arm_vfp_cprc_sub_candidate (t, &b);
3736 if (c <= 0 || c > 4)
3737 return 0;
3738 *base_type = b;
3739 *count = c;
3740 return 1;
3741 }
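
/* In AAPCS terms this corresponds to a homogeneous aggregate with at
   most four members: e.g. a struct of four doubles is a CPRC (count 4),
   while a struct of five doubles is not and is passed under the base
   ABI instead.  */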
3742
3743 /* Return 1 if the VFP ABI should be used for passing arguments to and
3744 returning values from a function of type FUNC_TYPE, 0
3745 otherwise. */
3746
3747 static int
3748 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3749 {
3750 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3751
3752 /* Variadic functions always use the base ABI. Assume that functions
3753 without debug info are not variadic. */
3754 if (func_type && check_typedef (func_type)->has_varargs ())
3755 return 0;
3756
3757 /* The VFP ABI is only supported as a variant of AAPCS. */
3758 if (tdep->arm_abi != ARM_ABI_AAPCS)
3759 return 0;
3760
3761 return tdep->fp_model == ARM_FLOAT_VFP;
3762 }
3763
3764 /* We currently support passing parameters in integer registers, which
3765 conforms with GCC's default model, and in VFP registers following the
3766 VFP variant of AAPCS.  Several other calling-convention variants exist
3767 and we should probably support some of them based on the selected ABI. */
3768
3769 static CORE_ADDR
3770 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3771 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3772 struct value **args, CORE_ADDR sp,
3773 function_call_return_method return_method,
3774 CORE_ADDR struct_addr)
3775 {
3776 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3777 int argnum;
3778 int argreg;
3779 int nstack;
3780 struct stack_item *si = NULL;
3781 int use_vfp_abi;
3782 struct type *ftype;
3783 unsigned vfp_regs_free = (1 << 16) - 1;
3784 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3785
3786 /* Determine the type of this function and whether the VFP ABI
3787 applies. */
3788 ftype = check_typedef (value_type (function));
3789 if (ftype->code () == TYPE_CODE_PTR)
3790 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3791 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3792
3793 /* Set the return address. For the ARM, the return breakpoint is
3794 always at BP_ADDR. */
3795 if (arm_pc_is_thumb (gdbarch, bp_addr))
3796 bp_addr |= 1;
3797 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3798
3799 /* Walk through the list of args and determine how large a temporary
3800 stack is required. Need to take care here as structs may be
3801 passed on the stack, and we have to push them. */
3802 nstack = 0;
3803
3804 argreg = ARM_A1_REGNUM;
3805 nstack = 0;
3806
3807 /* The struct_return pointer occupies the first parameter
3808 passing register. */
3809 if (return_method == return_method_struct)
3810 {
3811 arm_debug_printf ("struct return in %s = %s",
3812 gdbarch_register_name (gdbarch, argreg),
3813 paddress (gdbarch, struct_addr));
3814
3815 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3816 argreg++;
3817 }
3818
3819 for (argnum = 0; argnum < nargs; argnum++)
3820 {
3821 int len;
3822 struct type *arg_type;
3823 struct type *target_type;
3824 enum type_code typecode;
3825 const bfd_byte *val;
3826 int align;
3827 enum arm_vfp_cprc_base_type vfp_base_type;
3828 int vfp_base_count;
3829 int may_use_core_reg = 1;
3830
3831 arg_type = check_typedef (value_type (args[argnum]));
3832 len = TYPE_LENGTH (arg_type);
3833 target_type = TYPE_TARGET_TYPE (arg_type);
3834 typecode = arg_type->code ();
3835 val = value_contents (args[argnum]).data ();
3836
3837 align = type_align (arg_type);
3838 /* Round alignment up to a whole number of words. */
3839 align = (align + ARM_INT_REGISTER_SIZE - 1)
3840 & ~(ARM_INT_REGISTER_SIZE - 1);
3841 /* Different ABIs have different maximum alignments. */
3842 if (tdep->arm_abi == ARM_ABI_APCS)
3843 {
3844 /* The APCS ABI only requires word alignment. */
3845 align = ARM_INT_REGISTER_SIZE;
3846 }
3847 else
3848 {
3849 /* The AAPCS requires at most doubleword alignment. */
3850 if (align > ARM_INT_REGISTER_SIZE * 2)
3851 align = ARM_INT_REGISTER_SIZE * 2;
3852 }
3853
3854 if (use_vfp_abi
3855 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3856 &vfp_base_count))
3857 {
3858 int regno;
3859 int unit_length;
3860 int shift;
3861 unsigned mask;
3862
3863 /* Because this is a CPRC it cannot go in a core register or
3864 cause a core register to be skipped for alignment.
3865 Either it goes in VFP registers and the rest of this loop
3866 iteration is skipped for this argument, or it goes on the
3867 stack (and the stack alignment code is correct for this
3868 case). */
3869 may_use_core_reg = 0;
3870
3871 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3872 shift = unit_length / 4;
3873 mask = (1 << (shift * vfp_base_count)) - 1;
3874 for (regno = 0; regno < 16; regno += shift)
3875 if (((vfp_regs_free >> regno) & mask) == mask)
3876 break;
3877
3878 if (regno < 16)
3879 {
3880 int reg_char;
3881 int reg_scaled;
3882 int i;
3883
3884 vfp_regs_free &= ~(mask << regno);
3885 reg_scaled = regno / shift;
3886 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3887 for (i = 0; i < vfp_base_count; i++)
3888 {
3889 char name_buf[4];
3890 int regnum;
3891 if (reg_char == 'q')
3892 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3893 val + i * unit_length);
3894 else
3895 {
3896 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3897 reg_char, reg_scaled + i);
3898 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3899 strlen (name_buf));
3900 regcache->cooked_write (regnum, val + i * unit_length);
3901 }
3902 }
3903 continue;
3904 }
3905 else
3906 {
3907 /* This CPRC could not go in VFP registers, so all VFP
3908 registers are now marked as used. */
3909 vfp_regs_free = 0;
3910 }
3911 }
3912
3913 /* Push stack padding for doubleword alignment. */
3914 if (nstack & (align - 1))
3915 {
3916 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3917 nstack += ARM_INT_REGISTER_SIZE;
3918 }
3919
3920 /* Doubleword aligned quantities must go in even register pairs. */
3921 if (may_use_core_reg
3922 && argreg <= ARM_LAST_ARG_REGNUM
3923 && align > ARM_INT_REGISTER_SIZE
3924 && argreg & 1)
3925 argreg++;
3926
3927 /* If the argument is a pointer to a function, and it is a
3928 Thumb function, create a LOCAL copy of the value and set
3929 the THUMB bit in it. */
3930 if (TYPE_CODE_PTR == typecode
3931 && target_type != NULL
3932 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3933 {
3934 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3935 if (arm_pc_is_thumb (gdbarch, regval))
3936 {
3937 bfd_byte *copy = (bfd_byte *) alloca (len);
3938 store_unsigned_integer (copy, len, byte_order,
3939 MAKE_THUMB_ADDR (regval));
3940 val = copy;
3941 }
3942 }
3943
3944 /* Copy the argument to general registers or the stack in
3945 register-sized pieces. Large arguments are split between
3946 registers and stack. */
3947 while (len > 0)
3948 {
3949 int partial_len = len < ARM_INT_REGISTER_SIZE
3950 ? len : ARM_INT_REGISTER_SIZE;
3951 CORE_ADDR regval
3952 = extract_unsigned_integer (val, partial_len, byte_order);
3953
3954 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3955 {
3956 /* The argument is being passed in a general purpose
3957 register. */
3958 if (byte_order == BFD_ENDIAN_BIG)
3959 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3960
3961 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
3962 gdbarch_register_name (gdbarch, argreg),
3963 phex (regval, ARM_INT_REGISTER_SIZE));
3964
3965 regcache_cooked_write_unsigned (regcache, argreg, regval);
3966 argreg++;
3967 }
3968 else
3969 {
3970 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3971
3972 memset (buf, 0, sizeof (buf));
3973 store_unsigned_integer (buf, partial_len, byte_order, regval);
3974
3975 /* Push the arguments onto the stack. */
3976 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
3977 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3978 nstack += ARM_INT_REGISTER_SIZE;
3979 }
3980
3981 len -= partial_len;
3982 val += partial_len;
3983 }
3984 }
3985 /* If we have an odd number of words to push, then decrement the stack
3986 by one word now, so the first stack argument will be dword aligned. */
3987 if (nstack & 4)
3988 sp -= 4;
3989
3990 while (si)
3991 {
3992 sp -= si->len;
3993 write_memory (sp, si->data, si->len);
3994 si = pop_stack_item (si);
3995 }
3996
3997 /* Finally, update the SP register. */
3998 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3999
4000 return sp;
4001 }
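
/* For instance, with the VFP variant of AAPCS in effect the arguments
   of "void f (double a, int b)" end up with A in d0 and B in r0, while
   under the base ABI the same call places A in the even pair r0/r1 and
   B in r2.  */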
4002
4003
4004 /* Always align the frame to an 8-byte boundary. This is required on
4005 some platforms and harmless on the rest. */
4006
4007 static CORE_ADDR
4008 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4009 {
4010 /* Align the stack to eight bytes. */
4011 return sp & ~ (CORE_ADDR) 7;
4012 }
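
/* E.g. an incoming SP of 0x0badcafe is rounded down to 0x0badcaf8.  */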
4013
4014 static void
4015 print_fpu_flags (struct ui_file *file, int flags)
4016 {
4017 if (flags & (1 << 0))
4018 fputs_filtered ("IVO ", file);
4019 if (flags & (1 << 1))
4020 fputs_filtered ("DVZ ", file);
4021 if (flags & (1 << 2))
4022 fputs_filtered ("OFL ", file);
4023 if (flags & (1 << 3))
4024 fputs_filtered ("UFL ", file);
4025 if (flags & (1 << 4))
4026 fputs_filtered ("INX ", file);
4027 fputc_filtered ('\n', file);
4028 }
4029
4030 /* Print interesting information about the floating point processor
4031 (if present) or emulator. */
4032 static void
4033 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4034 struct frame_info *frame, const char *args)
4035 {
4036 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4037 int type;
4038
4039 type = (status >> 24) & 127;
4040 if (status & (1 << 31))
4041 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4042 else
4043 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4044 /* i18n: [floating point unit] mask */
4045 fputs_filtered (_("mask: "), file);
4046 print_fpu_flags (file, status >> 16);
4047 /* i18n: [floating point unit] flags */
4048 fputs_filtered (_("flags: "), file);
4049 print_fpu_flags (file, status);
4050 }
4051
4052 /* Construct the ARM extended floating point type. */
4053 static struct type *
4054 arm_ext_type (struct gdbarch *gdbarch)
4055 {
4056 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4057
4058 if (!tdep->arm_ext_type)
4059 tdep->arm_ext_type
4060 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4061 floatformats_arm_ext);
4062
4063 return tdep->arm_ext_type;
4064 }
4065
4066 static struct type *
4067 arm_neon_double_type (struct gdbarch *gdbarch)
4068 {
4069 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4070
4071 if (tdep->neon_double_type == NULL)
4072 {
4073 struct type *t, *elem;
4074
4075 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4076 TYPE_CODE_UNION);
4077 elem = builtin_type (gdbarch)->builtin_uint8;
4078 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4079 elem = builtin_type (gdbarch)->builtin_uint16;
4080 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4081 elem = builtin_type (gdbarch)->builtin_uint32;
4082 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4083 elem = builtin_type (gdbarch)->builtin_uint64;
4084 append_composite_type_field (t, "u64", elem);
4085 elem = builtin_type (gdbarch)->builtin_float;
4086 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4087 elem = builtin_type (gdbarch)->builtin_double;
4088 append_composite_type_field (t, "f64", elem);
4089
4090 t->set_is_vector (true);
4091 t->set_name ("neon_d");
4092 tdep->neon_double_type = t;
4093 }
4094
4095 return tdep->neon_double_type;
4096 }
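
/* The union layout above lets the same 64-bit D register be viewed at
   several element widths, so that (for example) "print $d0.u8" shows
   eight byte lanes while "print $d0.f64" shows the register as a single
   double.  */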
4097
4098 /* FIXME: The vector types are not correctly ordered on big-endian
4099 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4100 bits of d0 - regardless of what unit size is being held in d0. So
4101 the offset of the first uint8 in d0 is 7, but the offset of the
4102 first float is 4. This code works as-is for little-endian
4103 targets. */
4104
4105 static struct type *
4106 arm_neon_quad_type (struct gdbarch *gdbarch)
4107 {
4108 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4109
4110 if (tdep->neon_quad_type == NULL)
4111 {
4112 struct type *t, *elem;
4113
4114 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4115 TYPE_CODE_UNION);
4116 elem = builtin_type (gdbarch)->builtin_uint8;
4117 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4118 elem = builtin_type (gdbarch)->builtin_uint16;
4119 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4120 elem = builtin_type (gdbarch)->builtin_uint32;
4121 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4122 elem = builtin_type (gdbarch)->builtin_uint64;
4123 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4124 elem = builtin_type (gdbarch)->builtin_float;
4125 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4126 elem = builtin_type (gdbarch)->builtin_double;
4127 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4128
4129 t->set_is_vector (true);
4130 t->set_name ("neon_q");
4131 tdep->neon_quad_type = t;
4132 }
4133
4134 return tdep->neon_quad_type;
4135 }
4136
4137 /* Return true if REGNUM is a Q pseudo register. Return false
4138 otherwise.
4139
4140 REGNUM is the raw register number and not a pseudo-relative register
4141 number. */
4142
4143 static bool
4144 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4145 {
4146 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4147
4148 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4149 MVE (Q0~Q7) features. */
4150 if (tdep->have_q_pseudos
4151 && regnum >= tdep->q_pseudo_base
4152 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4153 return true;
4154
4155 return false;
4156 }
4157
4158 /* Return true if REGNUM is a VFP S pseudo register. Return false
4159 otherwise.
4160
4161 REGNUM is the raw register number and not a pseudo-relative register
4162 number. */
4163
4164 static bool
4165 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4166 {
4167 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4168
4169 if (tdep->have_s_pseudos
4170 && regnum >= tdep->s_pseudo_base
4171 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4172 return true;
4173
4174 return false;
4175 }
4176
4177 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4178 otherwise.
4179
4180 REGNUM is the raw register number and not a pseudo-relative register
4181 number. */
4182
4183 static bool
4184 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
4185 {
4186 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4187
4188 if (tdep->have_mve
4189 && regnum >= tdep->mve_pseudo_base
4190 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
4191 return true;
4192
4193 return false;
4194 }
4195
4196 /* Return the GDB type object for the "standard" data type of data in
4197 register N. */
4198
4199 static struct type *
4200 arm_register_type (struct gdbarch *gdbarch, int regnum)
4201 {
4202 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4203
4204 if (is_s_pseudo (gdbarch, regnum))
4205 return builtin_type (gdbarch)->builtin_float;
4206
4207 if (is_q_pseudo (gdbarch, regnum))
4208 return arm_neon_quad_type (gdbarch);
4209
4210 if (is_mve_pseudo (gdbarch, regnum))
4211 return builtin_type (gdbarch)->builtin_int16;
4212
4213 /* If the target description has register information, we are only
4214 in this function so that we can override the types of
4215 double-precision registers for NEON. */
4216 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4217 {
4218 struct type *t = tdesc_register_type (gdbarch, regnum);
4219
4220 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4221 && t->code () == TYPE_CODE_FLT
4222 && tdep->have_neon)
4223 return arm_neon_double_type (gdbarch);
4224 else
4225 return t;
4226 }
4227
4228 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4229 {
4230 if (!tdep->have_fpa_registers)
4231 return builtin_type (gdbarch)->builtin_void;
4232
4233 return arm_ext_type (gdbarch);
4234 }
4235 else if (regnum == ARM_SP_REGNUM)
4236 return builtin_type (gdbarch)->builtin_data_ptr;
4237 else if (regnum == ARM_PC_REGNUM)
4238 return builtin_type (gdbarch)->builtin_func_ptr;
4239 else if (regnum >= ARRAY_SIZE (arm_register_names))
4240 /* These registers are only supported on targets which supply
4241 an XML description. */
4242 return builtin_type (gdbarch)->builtin_int0;
4243 else
4244 return builtin_type (gdbarch)->builtin_uint32;
4245 }
4246
4247 /* Map a DWARF register REGNUM onto the appropriate GDB register
4248 number. */
4249
4250 static int
4251 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4252 {
4253 /* Core integer regs. */
4254 if (reg >= 0 && reg <= 15)
4255 return reg;
4256
4257 /* Legacy FPA encoding. These were once used in a way which
4258 overlapped with VFP register numbering, so their use is
4259 discouraged, but GDB doesn't support the ARM toolchain
4260 which used them for VFP. */
4261 if (reg >= 16 && reg <= 23)
4262 return ARM_F0_REGNUM + reg - 16;
4263
4264 /* New assignments for the FPA registers. */
4265 if (reg >= 96 && reg <= 103)
4266 return ARM_F0_REGNUM + reg - 96;
4267
4268 /* WMMX register assignments. */
4269 if (reg >= 104 && reg <= 111)
4270 return ARM_WCGR0_REGNUM + reg - 104;
4271
4272 if (reg >= 112 && reg <= 127)
4273 return ARM_WR0_REGNUM + reg - 112;
4274
4275 if (reg >= 192 && reg <= 199)
4276 return ARM_WC0_REGNUM + reg - 192;
4277
4278 /* VFP v2 registers. A double precision value is actually
4279 in d1 rather than s2, but the ABI only defines numbering
4280 for the single precision registers. This will "just work"
4281 in GDB for little endian targets (we'll read eight bytes,
4282 starting in s0 and then progressing to s1), but will be
4283 reversed on big endian targets with VFP. This won't
4284 be a problem for the new Neon quad registers; you're supposed
4285 to use DW_OP_piece for those. */
4286 if (reg >= 64 && reg <= 95)
4287 {
4288 char name_buf[4];
4289
4290 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4291 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4292 strlen (name_buf));
4293 }
4294
4295 /* VFP v3 / Neon registers. This range is also used for VFP v2
4296 registers, except that it now describes d0 instead of s0. */
4297 if (reg >= 256 && reg <= 287)
4298 {
4299 char name_buf[4];
4300
4301 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4302 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4303 strlen (name_buf));
4304 }
4305
4306 return -1;
4307 }
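
/* For example, DWARF registers 0-15 map directly to r0-r15, DWARF
   register 64 maps to the user register "s0", and DWARF register 256
   maps to "d0"; numbers outside the ranges handled above yield -1.  */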
4308
4309 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4310 static int
4311 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4312 {
4313 int reg = regnum;
4314 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4315
4316 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4317 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4318
4319 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4320 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4321
4322 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4323 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4324
4325 if (reg < NUM_GREGS)
4326 return SIM_ARM_R0_REGNUM + reg;
4327 reg -= NUM_GREGS;
4328
4329 if (reg < NUM_FREGS)
4330 return SIM_ARM_FP0_REGNUM + reg;
4331 reg -= NUM_FREGS;
4332
4333 if (reg < NUM_SREGS)
4334 return SIM_ARM_FPS_REGNUM + reg;
4335 reg -= NUM_SREGS;
4336
4337 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4338 }
4339
4340 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4341 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4342 NULL if an error occurs. BUF is freed. */
4343
4344 static gdb_byte *
4345 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4346 int old_len, int new_len)
4347 {
4348 gdb_byte *new_buf;
4349 int bytes_to_read = new_len - old_len;
4350
4351 new_buf = (gdb_byte *) xmalloc (new_len);
4352 memcpy (new_buf + bytes_to_read, buf, old_len);
4353 xfree (buf);
4354 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4355 {
4356 xfree (new_buf);
4357 return NULL;
4358 }
4359 return new_buf;
4360 }
4361
4362 /* An IT block is at most the 2-byte IT instruction followed by
4363 four 4-byte instructions. The furthest back we must search to
4364 find an IT block that affects the current instruction is thus
4365 2 + 3 * 4 == 14 bytes. */
4366 #define MAX_IT_BLOCK_PREFIX 14
4367
4368 /* Use a quick scan if there are more than this many bytes of
4369 code. */
4370 #define IT_SCAN_THRESHOLD 32
4371
4372 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4373 A breakpoint in an IT block may not be hit, depending on the
4374 condition flags. */
4375 static CORE_ADDR
4376 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4377 {
4378 gdb_byte *buf;
4379 char map_type;
4380 CORE_ADDR boundary, func_start;
4381 int buf_len;
4382 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4383 int i, any, last_it, last_it_count;
4384 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4385
4386 /* If we are using BKPT breakpoints, none of this is necessary. */
4387 if (tdep->thumb2_breakpoint == NULL)
4388 return bpaddr;
4389
4390 /* ARM mode does not have this problem. */
4391 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4392 return bpaddr;
4393
4394 /* We are setting a breakpoint in Thumb code that could potentially
4395 contain an IT block. The first step is to find how much Thumb
4396 code there is; we do not need to read outside of known Thumb
4397 sequences. */
4398 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4399 if (map_type == 0)
4400 /* Thumb-2 code must have mapping symbols to have a chance. */
4401 return bpaddr;
4402
4403 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4404
4405 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4406 && func_start > boundary)
4407 boundary = func_start;
4408
4409 /* Search for a candidate IT instruction. We have to do some fancy
4410 footwork to distinguish a real IT instruction from the second
4411 half of a 32-bit instruction, but there is no need for that if
4412 there's no candidate. */
4413 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4414 if (buf_len == 0)
4415 /* No room for an IT instruction. */
4416 return bpaddr;
4417
4418 buf = (gdb_byte *) xmalloc (buf_len);
4419 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4420 return bpaddr;
4421 any = 0;
4422 for (i = 0; i < buf_len; i += 2)
4423 {
4424 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4425 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4426 {
4427 any = 1;
4428 break;
4429 }
4430 }
4431
4432 if (any == 0)
4433 {
4434 xfree (buf);
4435 return bpaddr;
4436 }
4437
4438 /* OK, the code bytes before this instruction contain at least one
4439 halfword which resembles an IT instruction. We know that it's
4440 Thumb code, but there are still two possibilities. Either the
4441 halfword really is an IT instruction, or it is the second half of
4442 a 32-bit Thumb instruction. The only way we can tell is to
4443 scan forwards from a known instruction boundary. */
4444 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4445 {
4446 int definite;
4447
4448 /* There's a lot of code before this instruction. Start with an
4449 optimistic search; it's easy to recognize halfwords that can
4450 not be the start of a 32-bit instruction, and use that to
4451 lock on to the instruction boundaries. */
4452 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4453 if (buf == NULL)
4454 return bpaddr;
4455 buf_len = IT_SCAN_THRESHOLD;
4456
4457 definite = 0;
4458 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4459 {
4460 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4461 if (thumb_insn_size (inst1) == 2)
4462 {
4463 definite = 1;
4464 break;
4465 }
4466 }
4467
4468 /* At this point, if DEFINITE, BUF[I] is the first place we
4469 are sure that we know the instruction boundaries, and it is far
4470 enough from BPADDR that we could not miss an IT instruction
4471 affecting BPADDR. If ! DEFINITE, give up - start from a
4472 known boundary. */
4473 if (! definite)
4474 {
4475 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4476 bpaddr - boundary);
4477 if (buf == NULL)
4478 return bpaddr;
4479 buf_len = bpaddr - boundary;
4480 i = 0;
4481 }
4482 }
4483 else
4484 {
4485 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4486 if (buf == NULL)
4487 return bpaddr;
4488 buf_len = bpaddr - boundary;
4489 i = 0;
4490 }
4491
4492 /* Scan forwards. Find the last IT instruction before BPADDR. */
4493 last_it = -1;
4494 last_it_count = 0;
4495 while (i < buf_len)
4496 {
4497 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4498 last_it_count--;
4499 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4500 {
4501 last_it = i;
4502 if (inst1 & 0x0001)
4503 last_it_count = 4;
4504 else if (inst1 & 0x0002)
4505 last_it_count = 3;
4506 else if (inst1 & 0x0004)
4507 last_it_count = 2;
4508 else
4509 last_it_count = 1;
4510 }
4511 i += thumb_insn_size (inst1);
4512 }
4513
4514 xfree (buf);
4515
4516 if (last_it == -1)
4517 /* There wasn't really an IT instruction after all. */
4518 return bpaddr;
4519
4520 if (last_it_count < 1)
4521 /* It was too far away. */
4522 return bpaddr;
4523
4524 /* This really is a trouble spot. Move the breakpoint to the IT
4525 instruction. */
4526 return bpaddr - buf_len + last_it;
4527 }
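
/* For example, if the requested breakpoint address falls on the second
   instruction covered by an "ITTE EQ" block, the scan above finds that
   IT instruction within range and the breakpoint is moved back onto the
   IT instruction itself.  */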
4528
4529 /* ARM displaced stepping support.
4530
4531 Generally ARM displaced stepping works as follows:
4532
4533 1. When an instruction is to be single-stepped, it is first decoded by
4534 arm_process_displaced_insn. Depending on the type of instruction, it is
4535 then copied to a scratch location, possibly in a modified form. The
4536 copy_* set of functions performs such modification, as necessary. A
4537 breakpoint is placed after the modified instruction in the scratch space
4538 to return control to GDB. Note in particular that instructions which
4539 modify the PC will no longer do so after modification.
4540
4541 2. The instruction is single-stepped, by setting the PC to the scratch
4542 location address, and resuming. Control returns to GDB when the
4543 breakpoint is hit.
4544
4545 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4546 function used for the current instruction. This function's job is to
4547 put the CPU/memory state back to what it would have been if the
4548 instruction had been executed unmodified in its original location. */
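
/* As a concrete example, a conditional "bl <label>" is copied to the
   scratch area as a NOP; the closure records the condition, the link
   bit and the destination, and the cleanup routine then writes LR and
   PC exactly as the branch would have done at its original address
   (see install_b_bl_blx and cleanup_branch below).  */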
4549
4550 /* NOP instruction (mov r0, r0). */
4551 #define ARM_NOP 0xe1a00000
4552 #define THUMB_NOP 0x4600
4553
4554 /* Helper for register reads for displaced stepping. In particular, this
4555 returns the PC as it would be seen by the instruction at its original
4556 location. */
4557
4558 ULONGEST
4559 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4560 int regno)
4561 {
4562 ULONGEST ret;
4563 CORE_ADDR from = dsc->insn_addr;
4564
4565 if (regno == ARM_PC_REGNUM)
4566 {
4567 /* Compute pipeline offset:
4568 - When executing an ARM instruction, PC reads as the address of the
4569 current instruction plus 8.
4570 - When executing a Thumb instruction, PC reads as the address of the
4571 current instruction plus 4. */
4572
4573 if (!dsc->is_thumb)
4574 from += 8;
4575 else
4576 from += 4;
4577
4578 displaced_debug_printf ("read pc value %.8lx",
4579 (unsigned long) from);
4580 return (ULONGEST) from;
4581 }
4582 else
4583 {
4584 regcache_cooked_read_unsigned (regs, regno, &ret);
4585
4586 displaced_debug_printf ("read r%d value %.8lx",
4587 regno, (unsigned long) ret);
4588
4589 return ret;
4590 }
4591 }
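
/* Thus, for an ARM instruction originally at 0x8000 a read of the PC
   returns 0x8008, while for a Thumb instruction at the same address it
   returns 0x8004.  */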
4592
4593 static int
4594 displaced_in_arm_mode (struct regcache *regs)
4595 {
4596 ULONGEST ps;
4597 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4598
4599 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4600
4601 return (ps & t_bit) == 0;
4602 }
4603
4604 /* Write to the PC as from a branch instruction. */
4605
4606 static void
4607 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4608 ULONGEST val)
4609 {
4610 if (!dsc->is_thumb)
4611 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4612 architecture versions < 6. */
4613 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4614 val & ~(ULONGEST) 0x3);
4615 else
4616 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4617 val & ~(ULONGEST) 0x1);
4618 }
4619
4620 /* Write to the PC as from a branch-exchange instruction. */
4621
4622 static void
4623 bx_write_pc (struct regcache *regs, ULONGEST val)
4624 {
4625 ULONGEST ps;
4626 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4627
4628 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4629
4630 if ((val & 1) == 1)
4631 {
4632 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4633 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4634 }
4635 else if ((val & 2) == 0)
4636 {
4637 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4638 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4639 }
4640 else
4641 {
4642 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4643 mode, align dest to 4 bytes). */
4644 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4645 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4646 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4647 }
4648 }
4649
4650 /* Write to the PC as if from a load instruction. */
4651
4652 static void
4653 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4654 ULONGEST val)
4655 {
4656 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4657 bx_write_pc (regs, val);
4658 else
4659 branch_write_pc (regs, dsc, val);
4660 }
4661
4662 /* Write to the PC as if from an ALU instruction. */
4663
4664 static void
4665 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4666 ULONGEST val)
4667 {
4668 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4669 bx_write_pc (regs, val);
4670 else
4671 branch_write_pc (regs, dsc, val);
4672 }
4673
4674 /* Helper for writing to registers for displaced stepping. Writing to the PC
4675 has varying effects depending on the instruction which does the write:
4676 this is controlled by the WRITE_PC argument. */
4677
4678 void
4679 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4680 int regno, ULONGEST val, enum pc_write_style write_pc)
4681 {
4682 if (regno == ARM_PC_REGNUM)
4683 {
4684 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
4685
4686 switch (write_pc)
4687 {
4688 case BRANCH_WRITE_PC:
4689 branch_write_pc (regs, dsc, val);
4690 break;
4691
4692 case BX_WRITE_PC:
4693 bx_write_pc (regs, val);
4694 break;
4695
4696 case LOAD_WRITE_PC:
4697 load_write_pc (regs, dsc, val);
4698 break;
4699
4700 case ALU_WRITE_PC:
4701 alu_write_pc (regs, dsc, val);
4702 break;
4703
4704 case CANNOT_WRITE_PC:
4705 warning (_("Instruction wrote to PC in an unexpected way when "
4706 "single-stepping"));
4707 break;
4708
4709 default:
4710 internal_error (__FILE__, __LINE__,
4711 _("Invalid argument to displaced_write_reg"));
4712 }
4713
4714 dsc->wrote_to_pc = 1;
4715 }
4716 else
4717 {
4718 displaced_debug_printf ("writing r%d value %.8lx",
4719 regno, (unsigned long) val);
4720 regcache_cooked_write_unsigned (regs, regno, val);
4721 }
4722 }
4723
4724 /* This function is used to concisely determine if an instruction INSN
4725 references PC. Register fields of interest in INSN should have the
4726 corresponding fields of BITMASK set to 0b1111. The function
4727 returns return 1 if any of these fields in INSN reference the PC
4728 (also 0b1111, r15), else it returns 0. */
4729
4730 static int
4731 insn_references_pc (uint32_t insn, uint32_t bitmask)
4732 {
4733 uint32_t lowbit = 1;
4734
4735 while (bitmask != 0)
4736 {
4737 uint32_t mask;
4738
4739 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4740 ;
4741
4742 if (!lowbit)
4743 break;
4744
4745 mask = lowbit * 0xf;
4746
4747 if ((insn & mask) == mask)
4748 return 1;
4749
4750 bitmask &= ~mask;
4751 }
4752
4753 return 0;
4754 }
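
/* E.g. insn_references_pc (insn, 0x000f0000ul) is nonzero exactly when
   bits 16-19 of INSN (the Rn field) are all ones, i.e. when Rn is the
   PC.  */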
4755
4756 /* The simplest copy function. Many instructions have the same effect no
4757 matter what address they are executed at: in those cases, use this. */
4758
4759 static int
4760 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
4761 arm_displaced_step_copy_insn_closure *dsc)
4762 {
4763 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4764 (unsigned long) insn, iname);
4765
4766 dsc->modinsn[0] = insn;
4767
4768 return 0;
4769 }
4770
4771 static int
4772 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4773 uint16_t insn2, const char *iname,
4774 arm_displaced_step_copy_insn_closure *dsc)
4775 {
4776 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4777 "unmodified", insn1, insn2, iname);
4778
4779 dsc->modinsn[0] = insn1;
4780 dsc->modinsn[1] = insn2;
4781 dsc->numinsns = 2;
4782
4783 return 0;
4784 }
4785
4786 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4787 modification. */
4788 static int
4789 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4790 const char *iname,
4791 arm_displaced_step_copy_insn_closure *dsc)
4792 {
4793 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4794 insn, iname);
4795
4796 dsc->modinsn[0] = insn;
4797
4798 return 0;
4799 }
4800
4801 /* Preload instructions with immediate offset. */
4802
4803 static void
4804 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
4805 arm_displaced_step_copy_insn_closure *dsc)
4806 {
4807 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4808 if (!dsc->u.preload.immed)
4809 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4810 }
4811
4812 static void
4813 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4814 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
4815 {
4816 ULONGEST rn_val;
4817 /* Preload instructions:
4818
4819 {pli/pld} [rn, #+/-imm]
4820 ->
4821 {pli/pld} [r0, #+/-imm]. */
4822
4823 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4824 rn_val = displaced_read_reg (regs, dsc, rn);
4825 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4826 dsc->u.preload.immed = 1;
4827
4828 dsc->cleanup = &cleanup_preload;
4829 }
4830
4831 static int
4832 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4833 arm_displaced_step_copy_insn_closure *dsc)
4834 {
4835 unsigned int rn = bits (insn, 16, 19);
4836
4837 if (!insn_references_pc (insn, 0x000f0000ul))
4838 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4839
4840 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
4841
4842 dsc->modinsn[0] = insn & 0xfff0ffff;
4843
4844 install_preload (gdbarch, regs, dsc, rn);
4845
4846 return 0;
4847 }
4848
4849 static int
4850 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4851 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
4852 {
4853 unsigned int rn = bits (insn1, 0, 3);
4854 unsigned int u_bit = bit (insn1, 7);
4855 int imm12 = bits (insn2, 0, 11);
4856 ULONGEST pc_val;
4857
4858 if (rn != ARM_PC_REGNUM)
4859 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4860
4861 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4862 PLD (literal) Encoding T1. */
4863 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4864 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4865 imm12);
4866
4867 if (!u_bit)
4868 imm12 = -1 * imm12;
4869
4870 /* Rewrite instruction {pli/pld} PC imm12 into:
4871 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4872
4873 {pli/pld} [r0, r1]
4874
4875 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4876
4877 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4878 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4879
4880 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4881
4882 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4883 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4884 dsc->u.preload.immed = 0;
4885
4886 /* {pli/pld} [r0, r1] */
4887 dsc->modinsn[0] = insn1 & 0xfff0;
4888 dsc->modinsn[1] = 0xf001;
4889 dsc->numinsns = 2;
4890
4891 dsc->cleanup = &cleanup_preload;
4892 return 0;
4893 }
4894
4895 /* Preload instructions with register offset. */
4896
4897 static void
4898 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4899 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
4900 unsigned int rm)
4901 {
4902 ULONGEST rn_val, rm_val;
4903
4904 /* Preload register-offset instructions:
4905
4906 {pli/pld} [rn, rm {, shift}]
4907 ->
4908 {pli/pld} [r0, r1 {, shift}]. */
4909
4910 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4911 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4912 rn_val = displaced_read_reg (regs, dsc, rn);
4913 rm_val = displaced_read_reg (regs, dsc, rm);
4914 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4915 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4916 dsc->u.preload.immed = 0;
4917
4918 dsc->cleanup = &cleanup_preload;
4919 }
4920
4921 static int
4922 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4923 struct regcache *regs,
4924 arm_displaced_step_copy_insn_closure *dsc)
4925 {
4926 unsigned int rn = bits (insn, 16, 19);
4927 unsigned int rm = bits (insn, 0, 3);
4928
4929
4930 if (!insn_references_pc (insn, 0x000f000ful))
4931 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4932
4933 displaced_debug_printf ("copying preload insn %.8lx",
4934 (unsigned long) insn);
4935
4936 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4937
4938 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4939 return 0;
4940 }
4941
4942 /* Copy/cleanup coprocessor load and store instructions. */
4943
4944 static void
4945 cleanup_copro_load_store (struct gdbarch *gdbarch,
4946 struct regcache *regs,
4947 arm_displaced_step_copy_insn_closure *dsc)
4948 {
4949 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4950
4951 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4952
4953 if (dsc->u.ldst.writeback)
4954 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4955 }
4956
4957 static void
4958 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4959 arm_displaced_step_copy_insn_closure *dsc,
4960 int writeback, unsigned int rn)
4961 {
4962 ULONGEST rn_val;
4963
4964 /* Coprocessor load/store instructions:
4965
4966 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4967 ->
4968 {stc/stc2} [r0, #+/-imm].
4969
4970 ldc/ldc2 are handled identically. */
4971
4972 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4973 rn_val = displaced_read_reg (regs, dsc, rn);
4974 /* PC should be 4-byte aligned. */
4975 rn_val = rn_val & 0xfffffffc;
4976 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4977
4978 dsc->u.ldst.writeback = writeback;
4979 dsc->u.ldst.rn = rn;
4980
4981 dsc->cleanup = &cleanup_copro_load_store;
4982 }
4983
4984 static int
4985 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4986 struct regcache *regs,
4987 arm_displaced_step_copy_insn_closure *dsc)
4988 {
4989 unsigned int rn = bits (insn, 16, 19);
4990
4991 if (!insn_references_pc (insn, 0x000f0000ul))
4992 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4993
4994 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4995 (unsigned long) insn);
4996
4997 dsc->modinsn[0] = insn & 0xfff0ffff;
4998
4999 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5000
5001 return 0;
5002 }
5003
5004 static int
5005 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5006 uint16_t insn2, struct regcache *regs,
5007 arm_displaced_step_copy_insn_closure *dsc)
5008 {
5009 unsigned int rn = bits (insn1, 0, 3);
5010
5011 if (rn != ARM_PC_REGNUM)
5012 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5013 "copro load/store", dsc);
5014
5015 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5016 insn1, insn2);
5017
5018 dsc->modinsn[0] = insn1 & 0xfff0;
5019 dsc->modinsn[1] = insn2;
5020 dsc->numinsns = 2;
5021
5022 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5023 doesn't support writeback, so pass 0. */
5024 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5025
5026 return 0;
5027 }
5028
5029 /* Clean up branch instructions (actually perform the branch, by setting
5030 PC). */
5031
5032 static void
5033 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5034 arm_displaced_step_copy_insn_closure *dsc)
5035 {
5036 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5037 int branch_taken = condition_true (dsc->u.branch.cond, status);
5038 enum pc_write_style write_pc = dsc->u.branch.exchange
5039 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5040
5041 if (!branch_taken)
5042 return;
5043
5044 if (dsc->u.branch.link)
5045 {
5046 /* The value of LR should be the address of the next insn after the
5047 current one.  To avoid confusing later handling of a `bx lr' insn,
5048 if the current insn mode is Thumb, bit 0 of the LR value is set to 1. */
5049 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5050
5051 if (dsc->is_thumb)
5052 next_insn_addr |= 0x1;
5053
5054 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5055 CANNOT_WRITE_PC);
5056 }
5057
5058 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5059 }
5060
5061 /* Copy B/BL/BLX instructions with immediate destinations. */
5062
5063 static void
5064 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5065 arm_displaced_step_copy_insn_closure *dsc,
5066 unsigned int cond, int exchange, int link, long offset)
5067 {
5068 /* Implement "BL<cond> <label>" as:
5069
5070 Preparation: cond <- instruction condition
5071 Insn: mov r0, r0 (nop)
5072 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5073
5074 B<cond> similar, but don't set r14 in cleanup. */
5075
5076 dsc->u.branch.cond = cond;
5077 dsc->u.branch.link = link;
5078 dsc->u.branch.exchange = exchange;
5079
5080 dsc->u.branch.dest = dsc->insn_addr;
5081 if (link && exchange)
5082 /* For BLX, the offset is computed from Align (PC, 4). */
5083 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5084
5085 if (dsc->is_thumb)
5086 dsc->u.branch.dest += 4 + offset;
5087 else
5088 dsc->u.branch.dest += 8 + offset;
5089
5090 dsc->cleanup = &cleanup_branch;
5091 }
5092 static int
5093 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5094 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5095 {
5096 unsigned int cond = bits (insn, 28, 31);
5097 int exchange = (cond == 0xf);
5098 int link = exchange || bit (insn, 24);
5099 long offset;
5100
5101 displaced_debug_printf ("copying %s immediate insn %.8lx",
5102 (exchange) ? "blx" : (link) ? "bl" : "b",
5103 (unsigned long) insn);
5104 if (exchange)
5105 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5106 then arrange the switch into Thumb mode. */
5107 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5108 else
5109 offset = bits (insn, 0, 23) << 2;
5110
5111 if (bit (offset, 25))
5112 offset = offset | ~0x3ffffff;
5113
5114 dsc->modinsn[0] = ARM_NOP;
5115
5116 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5117 return 0;
5118 }
5119
5120 static int
5121 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5122 uint16_t insn2, struct regcache *regs,
5123 arm_displaced_step_copy_insn_closure *dsc)
5124 {
5125 int link = bit (insn2, 14);
5126 int exchange = link && !bit (insn2, 12);
5127 int cond = INST_AL;
5128 long offset = 0;
5129 int j1 = bit (insn2, 13);
5130 int j2 = bit (insn2, 11);
5131 int s = sbits (insn1, 10, 10);
5132 int i1 = !(j1 ^ bit (insn1, 10));
5133 int i2 = !(j2 ^ bit (insn1, 10));
5134
5135 if (!link && !exchange) /* B */
5136 {
5137 offset = (bits (insn2, 0, 10) << 1);
5138 if (bit (insn2, 12)) /* Encoding T4 */
5139 {
5140 offset |= (bits (insn1, 0, 9) << 12)
5141 | (i2 << 22)
5142 | (i1 << 23)
5143 | (s << 24);
5144 cond = INST_AL;
5145 }
5146 else /* Encoding T3 */
5147 {
5148 offset |= (bits (insn1, 0, 5) << 12)
5149 | (j1 << 18)
5150 | (j2 << 19)
5151 | (s << 20);
5152 cond = bits (insn1, 6, 9);
5153 }
5154 }
5155 else
5156 {
5157 offset = (bits (insn1, 0, 9) << 12);
5158 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5159 offset |= exchange ?
5160 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5161 }
5162
5163 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5164 link ? (exchange) ? "blx" : "bl" : "b",
5165 insn1, insn2, offset);
5166
5167 dsc->modinsn[0] = THUMB_NOP;
5168
5169 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5170 return 0;
5171 }
5172
5173 /* Copy B Thumb instructions. */
5174 static int
5175 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5176 arm_displaced_step_copy_insn_closure *dsc)
5177 {
5178 unsigned int cond = 0;
5179 int offset = 0;
5180 unsigned short bit_12_15 = bits (insn, 12, 15);
5181 CORE_ADDR from = dsc->insn_addr;
5182
5183 if (bit_12_15 == 0xd)
5184 {
5185 /* offset = SignExtend (imm8:0, 32) */
5186 offset = sbits ((insn << 1), 0, 8);
5187 cond = bits (insn, 8, 11);
5188 }
5189 else if (bit_12_15 == 0xe) /* Encoding T2 */
5190 {
5191 offset = sbits ((insn << 1), 0, 11);
5192 cond = INST_AL;
5193 }
5194
5195 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5196 insn, offset);
5197
5198 dsc->u.branch.cond = cond;
5199 dsc->u.branch.link = 0;
5200 dsc->u.branch.exchange = 0;
5201 dsc->u.branch.dest = from + 4 + offset;
5202
5203 dsc->modinsn[0] = THUMB_NOP;
5204
5205 dsc->cleanup = &cleanup_branch;
5206
5207 return 0;
5208 }
5209
5210 /* Copy BX/BLX with register-specified destinations. */
5211
5212 static void
5213 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5214 arm_displaced_step_copy_insn_closure *dsc, int link,
5215 unsigned int cond, unsigned int rm)
5216 {
5217 /* Implement "{BX,BLX}<cond> <reg>" as:
5218
5219 Preparation: cond <- instruction condition
5220 Insn: mov r0, r0 (nop)
5221 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5222
5223 Don't set r14 in cleanup for BX. */
5224
5225 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5226
5227 dsc->u.branch.cond = cond;
5228 dsc->u.branch.link = link;
5229
5230 dsc->u.branch.exchange = 1;
5231
5232 dsc->cleanup = &cleanup_branch;
5233 }
5234
5235 static int
5236 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5237 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5238 {
5239 unsigned int cond = bits (insn, 28, 31);
5240 /* BX: x12xxx1x
5241 BLX: x12xxx3x. */
5242 int link = bit (insn, 5);
5243 unsigned int rm = bits (insn, 0, 3);
5244
5245 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5246
5247 dsc->modinsn[0] = ARM_NOP;
5248
5249 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5250 return 0;
5251 }
5252
5253 static int
5254 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5255 struct regcache *regs,
5256 arm_displaced_step_copy_insn_closure *dsc)
5257 {
5258 int link = bit (insn, 7);
5259 unsigned int rm = bits (insn, 3, 6);
5260
5261 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5262
5263 dsc->modinsn[0] = THUMB_NOP;
5264
5265 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5266
5267 return 0;
5268 }
5269
5270
5271 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5272
5273 static void
5274 cleanup_alu_imm (struct gdbarch *gdbarch,
5275 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5276 {
5277 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5278 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5279 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5280 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5281 }
5282
5283 static int
5284 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5285 arm_displaced_step_copy_insn_closure *dsc)
5286 {
5287 unsigned int rn = bits (insn, 16, 19);
5288 unsigned int rd = bits (insn, 12, 15);
5289 unsigned int op = bits (insn, 21, 24);
5290 int is_mov = (op == 0xd);
5291 ULONGEST rd_val, rn_val;
5292
5293 if (!insn_references_pc (insn, 0x000ff000ul))
5294 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5295
5296 displaced_debug_printf ("copying immediate %s insn %.8lx",
5297 is_mov ? "move" : "ALU",
5298 (unsigned long) insn);
5299
5300 /* Instruction is of form:
5301
5302 <op><cond> rd, [rn,] #imm
5303
5304 Rewrite as:
5305
5306 Preparation: tmp1, tmp2 <- r0, r1;
5307 r0, r1 <- rd, rn
5308 Insn: <op><cond> r0, r1, #imm
5309 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5310 */
5311
5312 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5313 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5314 rn_val = displaced_read_reg (regs, dsc, rn);
5315 rd_val = displaced_read_reg (regs, dsc, rd);
5316 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5317 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5318 dsc->rd = rd;
5319
5320 if (is_mov)
5321 dsc->modinsn[0] = insn & 0xfff00fff;
5322 else
5323 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5324
5325 dsc->cleanup = &cleanup_alu_imm;
5326
5327 return 0;
5328 }
5329
5330 static int
5331 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5332 uint16_t insn2, struct regcache *regs,
5333 arm_displaced_step_copy_insn_closure *dsc)
5334 {
5335 unsigned int op = bits (insn1, 5, 8);
5336 unsigned int rn, rm, rd;
5337 ULONGEST rd_val, rn_val;
5338
5339 rn = bits (insn1, 0, 3); /* Rn */
5340 rm = bits (insn2, 0, 3); /* Rm */
5341 rd = bits (insn2, 8, 11); /* Rd */
5342
5343 /* This routine is only called for instruction MOV. */
5344 gdb_assert (op == 0x2 && rn == 0xf);
5345
5346 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5347 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5348
5349 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5350
5351 /* Instruction is of form:
5352
5353 <op><cond> rd, [rn,] #imm
5354
5355 Rewrite as:
5356
5357 Preparation: tmp1, tmp2 <- r0, r1;
5358 r0, r1 <- rd, rn
5359 Insn: <op><cond> r0, r1, #imm
5360 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5361 */
5362
5363 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5364 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5365 rn_val = displaced_read_reg (regs, dsc, rn);
5366 rd_val = displaced_read_reg (regs, dsc, rd);
5367 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5368 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5369 dsc->rd = rd;
5370
5371 dsc->modinsn[0] = insn1;
5372 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5373 dsc->numinsns = 2;
5374
5375 dsc->cleanup = &cleanup_alu_imm;
5376
5377 return 0;
5378 }
5379
5380 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5381
5382 static void
5383 cleanup_alu_reg (struct gdbarch *gdbarch,
5384 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5385 {
5386 ULONGEST rd_val;
5387 int i;
5388
5389 rd_val = displaced_read_reg (regs, dsc, 0);
5390
5391 for (i = 0; i < 3; i++)
5392 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5393
5394 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5395 }
5396
5397 static void
5398 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5399 arm_displaced_step_copy_insn_closure *dsc,
5400 unsigned int rd, unsigned int rn, unsigned int rm)
5401 {
5402 ULONGEST rd_val, rn_val, rm_val;
5403
5404 /* Instruction is of form:
5405
5406 <op><cond> rd, [rn,] rm [, <shift>]
5407
5408 Rewrite as:
5409
5410 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5411 r0, r1, r2 <- rd, rn, rm
5412 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5413 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5414 */
5415
5416 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5417 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5418 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5419 rd_val = displaced_read_reg (regs, dsc, rd);
5420 rn_val = displaced_read_reg (regs, dsc, rn);
5421 rm_val = displaced_read_reg (regs, dsc, rm);
5422 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5423 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5424 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5425 dsc->rd = rd;
5426
5427 dsc->cleanup = &cleanup_alu_reg;
5428 }
5429
5430 static int
5431 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5432 arm_displaced_step_copy_insn_closure *dsc)
5433 {
5434 unsigned int op = bits (insn, 21, 24);
5435 int is_mov = (op == 0xd);
5436
5437 if (!insn_references_pc (insn, 0x000ff00ful))
5438 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5439
5440 displaced_debug_printf ("copying reg %s insn %.8lx",
5441 is_mov ? "move" : "ALU", (unsigned long) insn);
5442
5443 if (is_mov)
5444 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5445 else
5446 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5447
5448 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5449 bits (insn, 0, 3));
5450 return 0;
5451 }
5452
5453 static int
5454 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5455 struct regcache *regs,
5456 arm_displaced_step_copy_insn_closure *dsc)
5457 {
5458 unsigned rm, rd;
5459
5460 rm = bits (insn, 3, 6);
5461 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5462
5463 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5464 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5465
5466 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5467
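  /* Rewrite Rd/Rdn -> r0 and Rm -> r2; install_alu_reg places the original
     register values there.  */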
5468 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5469
5470 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5471
5472 return 0;
5473 }
5474
5475 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5476
5477 static void
5478 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5479 struct regcache *regs,
5480 arm_displaced_step_copy_insn_closure *dsc)
5481 {
5482 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5483 int i;
5484
5485 for (i = 0; i < 4; i++)
5486 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5487
5488 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5489 }
5490
5491 static void
5492 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5493 arm_displaced_step_copy_insn_closure *dsc,
5494 unsigned int rd, unsigned int rn, unsigned int rm,
5495 unsigned rs)
5496 {
5497 int i;
5498 ULONGEST rd_val, rn_val, rm_val, rs_val;
5499
5500 /* Instruction is of form:
5501
5502 <op><cond> rd, [rn,] rm, <shift> rs
5503
5504 Rewrite as:
5505
5506 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5507 r0, r1, r2, r3 <- rd, rn, rm, rs
5508 Insn: <op><cond> r0, r1, r2, <shift> r3
5509 Cleanup: tmp5 <- r0
5510 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5511 rd <- tmp5
5512 */
5513
5514 for (i = 0; i < 4; i++)
5515 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5516
5517 rd_val = displaced_read_reg (regs, dsc, rd);
5518 rn_val = displaced_read_reg (regs, dsc, rn);
5519 rm_val = displaced_read_reg (regs, dsc, rm);
5520 rs_val = displaced_read_reg (regs, dsc, rs);
5521 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5522 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5523 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5524 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5525 dsc->rd = rd;
5526 dsc->cleanup = &cleanup_alu_shifted_reg;
5527 }
5528
5529 static int
5530 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5531 struct regcache *regs,
5532 arm_displaced_step_copy_insn_closure *dsc)
5533 {
5534 unsigned int op = bits (insn, 21, 24);
5535 int is_mov = (op == 0xd);
5536 unsigned int rd, rn, rm, rs;
5537
5538 if (!insn_references_pc (insn, 0x000fff0ful))
5539 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5540
5541 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5542 is_mov ? "move" : "ALU",
5543 (unsigned long) insn);
5544
5545 rn = bits (insn, 16, 19);
5546 rm = bits (insn, 0, 3);
5547 rs = bits (insn, 8, 11);
5548 rd = bits (insn, 12, 15);
5549
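  /* Rewrite the operands as Rd -> r0, Rn -> r1 (non-MOV forms only),
     Rm -> r2 and Rs -> r3, matching install_alu_shifted_reg.  */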
5550 if (is_mov)
5551 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5552 else
5553 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5554
5555 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5556
5557 return 0;
5558 }
5559
5560 /* Clean up load instructions. */
5561
5562 static void
5563 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5564 arm_displaced_step_copy_insn_closure *dsc)
5565 {
5566 ULONGEST rt_val, rt_val2 = 0, rn_val;
5567
5568 rt_val = displaced_read_reg (regs, dsc, 0);
5569 if (dsc->u.ldst.xfersize == 8)
5570 rt_val2 = displaced_read_reg (regs, dsc, 1);
5571 rn_val = displaced_read_reg (regs, dsc, 2);
5572
5573 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5574 if (dsc->u.ldst.xfersize > 4)
5575 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5576 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5577 if (!dsc->u.ldst.immed)
5578 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5579
5580 /* Handle register writeback. */
5581 if (dsc->u.ldst.writeback)
5582 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5583 /* Put result in right place. */
5584 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5585 if (dsc->u.ldst.xfersize == 8)
5586 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5587 }
5588
5589 /* Clean up store instructions. */
5590
5591 static void
5592 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5593 arm_displaced_step_copy_insn_closure *dsc)
5594 {
5595 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5596
5597 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5598 if (dsc->u.ldst.xfersize > 4)
5599 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5600 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5601 if (!dsc->u.ldst.immed)
5602 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5603 if (!dsc->u.ldst.restore_r4)
5604 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5605
5606 /* Writeback. */
5607 if (dsc->u.ldst.writeback)
5608 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5609 }
5610
5611 /* Copy "extra" load/store instructions. These are halfword/doubleword
5612 transfers, which have a different encoding to byte/word transfers. */
5613
5614 static int
5615 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5616 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5617 {
5618 unsigned int op1 = bits (insn, 20, 24);
5619 unsigned int op2 = bits (insn, 5, 6);
5620 unsigned int rt = bits (insn, 12, 15);
5621 unsigned int rn = bits (insn, 16, 19);
5622 unsigned int rm = bits (insn, 0, 3);
5623 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5624 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5625 int immed = (op1 & 0x4) != 0;
5626 int opcode;
5627 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5628
5629 if (!insn_references_pc (insn, 0x000ff00ful))
5630 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5631
5632 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5633 unprivileged ? "unprivileged " : "",
5634 (unsigned long) insn);
5635
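  /* OPCODE indexes the LOAD and BYTESIZE tables above: bits [3:2] come from
     OP2 (1 = halfword, 2 = doubleword load / signed byte, 3 = doubleword
     store / signed halfword), bit 1 is the immediate flag and bit 0 the
     load bit; the "- 4" makes OP2 == 1 map to index 0.  */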
5636 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5637
5638 if (opcode < 0)
5639 internal_error (__FILE__, __LINE__,
5640 _("copy_extra_ld_st: instruction decode error"));
5641
5642 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5643 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5644 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5645 if (!immed)
5646 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5647
5648 rt_val = displaced_read_reg (regs, dsc, rt);
5649 if (bytesize[opcode] == 8)
5650 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5651 rn_val = displaced_read_reg (regs, dsc, rn);
5652 if (!immed)
5653 rm_val = displaced_read_reg (regs, dsc, rm);
5654
5655 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5656 if (bytesize[opcode] == 8)
5657 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5658 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5659 if (!immed)
5660 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5661
5662 dsc->rd = rt;
5663 dsc->u.ldst.xfersize = bytesize[opcode];
5664 dsc->u.ldst.rn = rn;
5665 dsc->u.ldst.immed = immed;
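  /* Writeback happens for post-indexed (P == 0) or pre-indexed with
     writeback (W == 1) addressing.  */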
5666 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5667 dsc->u.ldst.restore_r4 = 0;
5668
5669 if (immed)
5670 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5671 ->
5672 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5673 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5674 else
5675 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5676 ->
5677 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5678 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5679
5680 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5681
5682 return 0;
5683 }
5684
5685 /* Copy byte/halfword/word loads and stores.  */
5686
5687 static void
5688 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5689 arm_displaced_step_copy_insn_closure *dsc, int load,
5690 int immed, int writeback, int size, int usermode,
5691 int rt, int rm, int rn)
5692 {
5693 ULONGEST rt_val, rn_val, rm_val = 0;
5694
5695 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5696 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5697 if (!immed)
5698 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5699 if (!load)
5700 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5701
5702 rt_val = displaced_read_reg (regs, dsc, rt);
5703 rn_val = displaced_read_reg (regs, dsc, rn);
5704 if (!immed)
5705 rm_val = displaced_read_reg (regs, dsc, rm);
5706
5707 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5708 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5709 if (!immed)
5710 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5711 dsc->rd = rt;
5712 dsc->u.ldst.xfersize = size;
5713 dsc->u.ldst.rn = rn;
5714 dsc->u.ldst.immed = immed;
5715 dsc->u.ldst.writeback = writeback;
5716
5717 /* To write PC we can do:
5718
5719 Before this sequence of instructions:
5720 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5721 r2 is the Rn value got from displaced_read_reg.
5722
5723 Insn1: push {pc} Write address of STR instruction + offset on stack
5724 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5725 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5726 = addr(Insn1) + offset - addr(Insn3) - 8
5727 = offset - 16
5728 Insn4: add r4, r4, #8 r4 = offset - 8
5729 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5730 = from + offset
5731 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5732
5733 Otherwise we don't know what value to write for PC, since the offset is
5734 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5735 of this can be found in Section "Saving from r15" in
5736 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
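  /* For example (an illustration, not generated code): if the original
     instruction is "str pc, [rn]" at address FROM, r0 is read as FROM + 8,
     and Insn1..Insn5 leave r0 = FROM + offset, where offset is whatever this
     core actually stores for the PC (8 or 12).  Insn6 then stores the same
     value the original instruction would have stored.  */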
5737
5738 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5739 }
5740
5741
5742 static int
5743 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5744 uint16_t insn2, struct regcache *regs,
5745 arm_displaced_step_copy_insn_closure *dsc, int size)
5746 {
5747 unsigned int u_bit = bit (insn1, 7);
5748 unsigned int rt = bits (insn2, 12, 15);
5749 int imm12 = bits (insn2, 0, 11);
5750 ULONGEST pc_val;
5751
5752 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5753 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5754 imm12);
5755
5756 if (!u_bit)
5757 imm12 = -1 * imm12;
5758
5759   /* Rewrite instruction LDR Rt imm12 into:
5760
5761      Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5762
5763      LDR R0, [R2, R3]
5764
5765      Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
5766
5767
5768 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5769 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5770 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5771
5772 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5773
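  /* LDR (literal) uses Align(PC, 4) as the base address.  */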
5774 pc_val = pc_val & 0xfffffffc;
5775
5776 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5777 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5778
5779 dsc->rd = rt;
5780
5781 dsc->u.ldst.xfersize = size;
5782 dsc->u.ldst.immed = 0;
5783 dsc->u.ldst.writeback = 0;
5784 dsc->u.ldst.restore_r4 = 0;
5785
5786 /* LDR R0, R2, R3 */
5787 dsc->modinsn[0] = 0xf852;
5788 dsc->modinsn[1] = 0x3;
5789 dsc->numinsns = 2;
5790
5791 dsc->cleanup = &cleanup_load;
5792
5793 return 0;
5794 }
5795
5796 static int
5797 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5798 uint16_t insn2, struct regcache *regs,
5799 arm_displaced_step_copy_insn_closure *dsc,
5800 int writeback, int immed)
5801 {
5802 unsigned int rt = bits (insn2, 12, 15);
5803 unsigned int rn = bits (insn1, 0, 3);
5804 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5805 /* In LDR (register), there is also a register Rm, which is not allowed to
5806 be PC, so we don't have to check it. */
5807
5808 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5809 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5810 dsc);
5811
5812 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5813 rt, rn, insn1, insn2);
5814
5815 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5816 0, rt, rm, rn);
5817
5818 dsc->u.ldst.restore_r4 = 0;
5819
5820 if (immed)
5821 /* ldr[b]<cond> rt, [rn, #imm], etc.
5822 ->
5823 ldr[b]<cond> r0, [r2, #imm]. */
5824 {
5825 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5826 dsc->modinsn[1] = insn2 & 0x0fff;
5827 }
5828 else
5829 /* ldr[b]<cond> rt, [rn, rm], etc.
5830 ->
5831 ldr[b]<cond> r0, [r2, r3]. */
5832 {
5833 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5834 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5835 }
5836
5837 dsc->numinsns = 2;
5838
5839 return 0;
5840 }
5841
5842
5843 static int
5844 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5845 struct regcache *regs,
5846 arm_displaced_step_copy_insn_closure *dsc,
5847 int load, int size, int usermode)
5848 {
5849 int immed = !bit (insn, 25);
5850 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5851 unsigned int rt = bits (insn, 12, 15);
5852 unsigned int rn = bits (insn, 16, 19);
5853 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5854
5855 if (!insn_references_pc (insn, 0x000ff00ful))
5856 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5857
5858 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5859 load ? (size == 1 ? "ldrb" : "ldr")
5860 : (size == 1 ? "strb" : "str"),
5861 usermode ? "t" : "",
5862 rt, rn,
5863 (unsigned long) insn);
5864
5865 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5866 usermode, rt, rm, rn);
5867
5868 if (load || rt != ARM_PC_REGNUM)
5869 {
5870 dsc->u.ldst.restore_r4 = 0;
5871
5872 if (immed)
5873 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5874 ->
5875 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5876 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5877 else
5878 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5879 ->
5880 {ldr,str}[b]<cond> r0, [r2, r3]. */
5881 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5882 }
5883 else
5884 {
5885 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5886 dsc->u.ldst.restore_r4 = 1;
5887 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5888 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5889 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5890 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5891 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5892
5893 /* As above. */
5894 if (immed)
5895 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5896 else
5897 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5898
5899 dsc->numinsns = 6;
5900 }
5901
5902 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5903
5904 return 0;
5905 }
5906
5907 /* Cleanup LDM instructions with fully-populated register list. This is an
5908 unfortunate corner case: it's impossible to implement correctly by modifying
5909 the instruction. The issue is as follows: we have an instruction,
5910
5911 ldm rN, {r0-r15}
5912
5913 which we must rewrite to avoid loading PC. A possible solution would be to
5914 do the load in two halves, something like (with suitable cleanup
5915 afterwards):
5916
5917 mov r8, rN
5918 ldm[id][ab] r8!, {r0-r7}
5919 str r7, <temp>
5920 ldm[id][ab] r8, {r7-r14}
5921 <bkpt>
5922
5923 but at present there's no suitable place for <temp>, since the scratch space
5924 is overwritten before the cleanup routine is called. For now, we simply
5925 emulate the instruction. */
5926
5927 static void
5928 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5929 arm_displaced_step_copy_insn_closure *dsc)
5930 {
5931 int inc = dsc->u.block.increment;
5932 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5933 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5934 uint32_t regmask = dsc->u.block.regmask;
5935 int regno = inc ? 0 : 15;
5936 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5937 int exception_return = dsc->u.block.load && dsc->u.block.user
5938 && (regmask & 0x8000) != 0;
5939 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5940 int do_transfer = condition_true (dsc->u.block.cond, status);
5941 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5942
5943 if (!do_transfer)
5944 return;
5945
5946 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5947 sensible we can do here. Complain loudly. */
5948 if (exception_return)
5949 error (_("Cannot single-step exception return"));
5950
5951 /* We don't handle any stores here for now. */
5952 gdb_assert (dsc->u.block.load != 0);
5953
5954 displaced_debug_printf ("emulating block transfer: %s %s %s",
5955 dsc->u.block.load ? "ldm" : "stm",
5956 dsc->u.block.increment ? "inc" : "dec",
5957 dsc->u.block.before ? "before" : "after");
5958
5959 while (regmask)
5960 {
5961 uint32_t memword;
5962
5963 if (inc)
5964 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5965 regno++;
5966 else
5967 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5968 regno--;
5969
5970 xfer_addr += bump_before;
5971
5972 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5973 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5974
5975 xfer_addr += bump_after;
5976
5977 regmask &= ~(1 << regno);
5978 }
5979
5980 if (dsc->u.block.writeback)
5981 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5982 CANNOT_WRITE_PC);
5983 }
5984
5985 /* Clean up an STM which included the PC in the register list. */
5986
5987 static void
5988 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5989 arm_displaced_step_copy_insn_closure *dsc)
5990 {
5991 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5992 int store_executed = condition_true (dsc->u.block.cond, status);
5993 CORE_ADDR pc_stored_at, transferred_regs
5994 = count_one_bits (dsc->u.block.regmask);
5995 CORE_ADDR stm_insn_addr;
5996 uint32_t pc_val;
5997 long offset;
5998 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5999
6000 /* If condition code fails, there's nothing else to do. */
6001 if (!store_executed)
6002 return;
6003
6004 if (dsc->u.block.increment)
6005 {
6006 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6007
6008 if (dsc->u.block.before)
6009 pc_stored_at += 4;
6010 }
6011 else
6012 {
6013 pc_stored_at = dsc->u.block.xfer_addr;
6014
6015 if (dsc->u.block.before)
6016 pc_stored_at -= 4;
6017 }
6018
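  /* The displaced STM stored SCRATCH_BASE plus some architecture-dependent
     offset (8 or 12) for the PC slot; recover that offset and apply it to
     the original instruction address instead.  */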
6019 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6020 stm_insn_addr = dsc->scratch_base;
6021 offset = pc_val - stm_insn_addr;
6022
6023 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6024 offset);
6025
6026 /* Rewrite the stored PC to the proper value for the non-displaced original
6027 instruction. */
6028 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6029 dsc->insn_addr + offset);
6030 }
6031
6032 /* Clean up an LDM which includes the PC in the register list. We clumped all
6033 the registers in the transferred list into a contiguous range r0...rX (to
6034 avoid loading PC directly and losing control of the debugged program), so we
6035 must undo that here. */
6036
6037 static void
6038 cleanup_block_load_pc (struct gdbarch *gdbarch,
6039 struct regcache *regs,
6040 arm_displaced_step_copy_insn_closure *dsc)
6041 {
6042 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6043 int load_executed = condition_true (dsc->u.block.cond, status);
6044 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6045 unsigned int regs_loaded = count_one_bits (mask);
6046 unsigned int num_to_shuffle = regs_loaded, clobbered;
6047
6048 /* The method employed here will fail if the register list is fully populated
6049 (we need to avoid loading PC directly). */
6050 gdb_assert (num_to_shuffle < 16);
6051
6052 if (!load_executed)
6053 return;
6054
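  /* The rewritten LDM loaded into the contiguous range r0..r(N-1), so those
     are the registers whose original values may still need restoring.  */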
6055 clobbered = (1 << num_to_shuffle) - 1;
6056
6057 while (num_to_shuffle > 0)
6058 {
6059 if ((mask & (1 << write_reg)) != 0)
6060 {
6061 unsigned int read_reg = num_to_shuffle - 1;
6062
6063 if (read_reg != write_reg)
6064 {
6065 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6066 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6067 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6068 read_reg, write_reg);
6069 }
6070 else
6071 displaced_debug_printf ("LDM: register r%d already in the right "
6072 "place", write_reg);
6073
6074 clobbered &= ~(1 << write_reg);
6075
6076 num_to_shuffle--;
6077 }
6078
6079 write_reg--;
6080 }
6081
6082 /* Restore any registers we scribbled over. */
6083 for (write_reg = 0; clobbered != 0; write_reg++)
6084 {
6085 if ((clobbered & (1 << write_reg)) != 0)
6086 {
6087 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6088 CANNOT_WRITE_PC);
6089 displaced_debug_printf ("LDM: restored clobbered register r%d",
6090 write_reg);
6091 clobbered &= ~(1 << write_reg);
6092 }
6093 }
6094
6095 /* Perform register writeback manually. */
6096 if (dsc->u.block.writeback)
6097 {
6098 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6099
6100 if (dsc->u.block.increment)
6101 new_rn_val += regs_loaded * 4;
6102 else
6103 new_rn_val -= regs_loaded * 4;
6104
6105 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6106 CANNOT_WRITE_PC);
6107 }
6108 }
6109
6110 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6111 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6112
6113 static int
6114 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6115 struct regcache *regs,
6116 arm_displaced_step_copy_insn_closure *dsc)
6117 {
6118 int load = bit (insn, 20);
6119 int user = bit (insn, 22);
6120 int increment = bit (insn, 23);
6121 int before = bit (insn, 24);
6122 int writeback = bit (insn, 21);
6123 int rn = bits (insn, 16, 19);
6124
6125 /* Block transfers which don't mention PC can be run directly
6126 out-of-line. */
6127 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6128 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6129
6130 if (rn == ARM_PC_REGNUM)
6131 {
6132 warning (_("displaced: Unpredictable LDM or STM with "
6133 "base register r15"));
6134 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6135 }
6136
6137 displaced_debug_printf ("copying block transfer insn %.8lx",
6138 (unsigned long) insn);
6139
6140 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6141 dsc->u.block.rn = rn;
6142
6143 dsc->u.block.load = load;
6144 dsc->u.block.user = user;
6145 dsc->u.block.increment = increment;
6146 dsc->u.block.before = before;
6147 dsc->u.block.writeback = writeback;
6148 dsc->u.block.cond = bits (insn, 28, 31);
6149
6150 dsc->u.block.regmask = insn & 0xffff;
6151
6152 if (load)
6153 {
6154 if ((insn & 0xffff) == 0xffff)
6155 {
6156 /* LDM with a fully-populated register list. This case is
6157 particularly tricky. Implement for now by fully emulating the
6158 instruction (which might not behave perfectly in all cases, but
6159 these instructions should be rare enough for that not to matter
6160 too much). */
6161 dsc->modinsn[0] = ARM_NOP;
6162
6163 dsc->cleanup = &cleanup_block_load_all;
6164 }
6165 else
6166 {
6167 /* LDM of a list of registers which includes PC. Implement by
6168 rewriting the list of registers to be transferred into a
6169 contiguous chunk r0...rX before doing the transfer, then shuffling
6170 registers into the correct places in the cleanup routine. */
6171 unsigned int regmask = insn & 0xffff;
6172 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6173 unsigned int i;
6174
6175 for (i = 0; i < num_in_list; i++)
6176 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6177
6178 /* Writeback makes things complicated. We need to avoid clobbering
6179 the base register with one of the registers in our modified
6180 register list, but just using a different register can't work in
6181 all cases, e.g.:
6182
6183 ldm r14!, {r0-r13,pc}
6184
6185 which would need to be rewritten as:
6186
6187 ldm rN!, {r0-r14}
6188
6189 but that can't work, because there's no free register for N.
6190
6191 Solve this by turning off the writeback bit, and emulating
6192 writeback manually in the cleanup routine. */
6193
6194 if (writeback)
6195 insn &= ~(1 << 21);
6196
6197 new_regmask = (1 << num_in_list) - 1;
6198
6199 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6200 "%.4x, modified list %.4x",
6201 rn, writeback ? "!" : "",
6202 (int) insn & 0xffff, new_regmask);
6203
6204 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6205
6206 dsc->cleanup = &cleanup_block_load_pc;
6207 }
6208 }
6209 else
6210 {
6211 /* STM of a list of registers which includes PC. Run the instruction
6212 as-is, but out of line: this will store the wrong value for the PC,
6213 so we must manually fix up the memory in the cleanup routine.
6214 Doing things this way has the advantage that we can auto-detect
6215 the offset of the PC write (which is architecture-dependent) in
6216 the cleanup routine. */
6217 dsc->modinsn[0] = insn;
6218
6219 dsc->cleanup = &cleanup_block_store_pc;
6220 }
6221
6222 return 0;
6223 }
6224
6225 static int
6226 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6227 struct regcache *regs,
6228 arm_displaced_step_copy_insn_closure *dsc)
6229 {
6230 int rn = bits (insn1, 0, 3);
6231 int load = bit (insn1, 4);
6232 int writeback = bit (insn1, 5);
6233
6234 /* Block transfers which don't mention PC can be run directly
6235 out-of-line. */
6236 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6237 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6238
6239 if (rn == ARM_PC_REGNUM)
6240 {
6241 warning (_("displaced: Unpredictable LDM or STM with "
6242 "base register r15"));
6243 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6244 "unpredictable ldm/stm", dsc);
6245 }
6246
6247 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6248 insn1, insn2);
6249
6250   /* Clear bit 13, since it should always be zero.  */
6251 dsc->u.block.regmask = (insn2 & 0xdfff);
6252 dsc->u.block.rn = rn;
6253
6254 dsc->u.block.load = load;
6255 dsc->u.block.user = 0;
6256 dsc->u.block.increment = bit (insn1, 7);
6257 dsc->u.block.before = bit (insn1, 8);
6258 dsc->u.block.writeback = writeback;
6259 dsc->u.block.cond = INST_AL;
6260 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6261
6262 if (load)
6263 {
6264 if (dsc->u.block.regmask == 0xffff)
6265 {
6266	  /* This case cannot be reached: bit 13 was cleared above, so the mask is never 0xffff.  */
6267 gdb_assert (0);
6268 }
6269 else
6270 {
6271 unsigned int regmask = dsc->u.block.regmask;
6272 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6273 unsigned int i;
6274
6275 for (i = 0; i < num_in_list; i++)
6276 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6277
6278 if (writeback)
6279 insn1 &= ~(1 << 5);
6280
6281 new_regmask = (1 << num_in_list) - 1;
6282
6283 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6284 "%.4x, modified list %.4x",
6285 rn, writeback ? "!" : "",
6286 (int) dsc->u.block.regmask, new_regmask);
6287
6288 dsc->modinsn[0] = insn1;
6289 dsc->modinsn[1] = (new_regmask & 0xffff);
6290 dsc->numinsns = 2;
6291
6292 dsc->cleanup = &cleanup_block_load_pc;
6293 }
6294 }
6295 else
6296 {
6297 dsc->modinsn[0] = insn1;
6298 dsc->modinsn[1] = insn2;
6299 dsc->numinsns = 2;
6300 dsc->cleanup = &cleanup_block_store_pc;
6301 }
6302 return 0;
6303 }
6304
6305 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6306 This is used to avoid a dependency on BFD's bfd_endian enum. */
6307
6308 ULONGEST
6309 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6310 int byte_order)
6311 {
6312 return read_memory_unsigned_integer (memaddr, len,
6313 (enum bfd_endian) byte_order);
6314 }
6315
6316 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6317
6318 CORE_ADDR
6319 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6320 CORE_ADDR val)
6321 {
6322 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6323 }
6324
6325 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6326
6327 static CORE_ADDR
6328 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6329 {
6330 return 0;
6331 }
6332
6333 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6334
6335 int
6336 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6337 {
6338 return arm_is_thumb (self->regcache);
6339 }
6340
6341 /* single_step() is called just before we want to resume the inferior,
6342 if we want to single-step it but there is no hardware or kernel
6343    single-step support.  We find the possible targets of the coming
6344    instructions and set breakpoints on them.  */
6345
6346 std::vector<CORE_ADDR>
6347 arm_software_single_step (struct regcache *regcache)
6348 {
6349 struct gdbarch *gdbarch = regcache->arch ();
6350 struct arm_get_next_pcs next_pcs_ctx;
6351
6352 arm_get_next_pcs_ctor (&next_pcs_ctx,
6353 &arm_get_next_pcs_ops,
6354 gdbarch_byte_order (gdbarch),
6355 gdbarch_byte_order_for_code (gdbarch),
6356 0,
6357 regcache);
6358
6359 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6360
6361 for (CORE_ADDR &pc_ref : next_pcs)
6362 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6363
6364 return next_pcs;
6365 }
6366
6367 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6368 for Linux, where some SVC instructions must be treated specially. */
6369
6370 static void
6371 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6372 arm_displaced_step_copy_insn_closure *dsc)
6373 {
6374 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6375
6376 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6377 (unsigned long) resume_addr);
6378
6379 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6380 }
6381
6382
6383 /* Common copy routine for svc instruction. */
6384
6385 static int
6386 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6387 arm_displaced_step_copy_insn_closure *dsc)
6388 {
6389 /* Preparation: none.
6390 Insn: unmodified svc.
6391 Cleanup: pc <- insn_addr + insn_size. */
6392
6393 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6394 instruction. */
6395 dsc->wrote_to_pc = 1;
6396
6397 /* Allow OS-specific code to override SVC handling. */
6398 if (dsc->u.svc.copy_svc_os)
6399 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6400 else
6401 {
6402 dsc->cleanup = &cleanup_svc;
6403 return 0;
6404 }
6405 }
6406
6407 static int
6408 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6409 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6410 {
6411
6412 displaced_debug_printf ("copying svc insn %.8lx",
6413 (unsigned long) insn);
6414
6415 dsc->modinsn[0] = insn;
6416
6417 return install_svc (gdbarch, regs, dsc);
6418 }
6419
6420 static int
6421 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6422 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6423 {
6424
6425 displaced_debug_printf ("copying svc insn %.4x", insn);
6426
6427 dsc->modinsn[0] = insn;
6428
6429 return install_svc (gdbarch, regs, dsc);
6430 }
6431
6432 /* Copy undefined instructions. */
6433
6434 static int
6435 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6436 arm_displaced_step_copy_insn_closure *dsc)
6437 {
6438 displaced_debug_printf ("copying undefined insn %.8lx",
6439 (unsigned long) insn);
6440
6441 dsc->modinsn[0] = insn;
6442
6443 return 0;
6444 }
6445
6446 static int
6447 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6448 arm_displaced_step_copy_insn_closure *dsc)
6449 {
6450
6451 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6452 (unsigned short) insn1, (unsigned short) insn2);
6453
6454 dsc->modinsn[0] = insn1;
6455 dsc->modinsn[1] = insn2;
6456 dsc->numinsns = 2;
6457
6458 return 0;
6459 }
6460
6461 /* Copy unpredictable instructions. */
6462
6463 static int
6464 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6465 arm_displaced_step_copy_insn_closure *dsc)
6466 {
6467 displaced_debug_printf ("copying unpredictable insn %.8lx",
6468 (unsigned long) insn);
6469
6470 dsc->modinsn[0] = insn;
6471
6472 return 0;
6473 }
6474
6475 /* The decode_* functions are instruction decoding helpers. They mostly follow
6476 the presentation in the ARM ARM. */
6477
6478 static int
6479 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6480 struct regcache *regs,
6481 arm_displaced_step_copy_insn_closure *dsc)
6482 {
6483 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6484 unsigned int rn = bits (insn, 16, 19);
6485
6486 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6487 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6488 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6489 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6490 else if ((op1 & 0x60) == 0x20)
6491 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6492 else if ((op1 & 0x71) == 0x40)
6493 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6494 dsc);
6495 else if ((op1 & 0x77) == 0x41)
6496 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6497 else if ((op1 & 0x77) == 0x45)
6498 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6499 else if ((op1 & 0x77) == 0x51)
6500 {
6501 if (rn != 0xf)
6502 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6503 else
6504 return arm_copy_unpred (gdbarch, insn, dsc);
6505 }
6506 else if ((op1 & 0x77) == 0x55)
6507 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6508 else if (op1 == 0x57)
6509 switch (op2)
6510 {
6511 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6512 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6513 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6514 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6515 default: return arm_copy_unpred (gdbarch, insn, dsc);
6516 }
6517 else if ((op1 & 0x63) == 0x43)
6518 return arm_copy_unpred (gdbarch, insn, dsc);
6519 else if ((op2 & 0x1) == 0x0)
6520 switch (op1 & ~0x80)
6521 {
6522 case 0x61:
6523 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6524 case 0x65:
6525 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6526 case 0x71: case 0x75:
6527 /* pld/pldw reg. */
6528 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6529 case 0x63: case 0x67: case 0x73: case 0x77:
6530 return arm_copy_unpred (gdbarch, insn, dsc);
6531 default:
6532 return arm_copy_undef (gdbarch, insn, dsc);
6533 }
6534 else
6535 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6536 }
6537
6538 static int
6539 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6540 struct regcache *regs,
6541 arm_displaced_step_copy_insn_closure *dsc)
6542 {
6543 if (bit (insn, 27) == 0)
6544 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6545 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6546 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6547 {
6548 case 0x0: case 0x2:
6549 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6550
6551 case 0x1: case 0x3:
6552 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6553
6554 case 0x4: case 0x5: case 0x6: case 0x7:
6555 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6556
6557 case 0x8:
6558 switch ((insn & 0xe00000) >> 21)
6559 {
6560 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6561 /* stc/stc2. */
6562 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6563
6564 case 0x2:
6565 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6566
6567 default:
6568 return arm_copy_undef (gdbarch, insn, dsc);
6569 }
6570
6571 case 0x9:
6572 {
6573 int rn_f = (bits (insn, 16, 19) == 0xf);
6574 switch ((insn & 0xe00000) >> 21)
6575 {
6576 case 0x1: case 0x3:
6577 /* ldc/ldc2 imm (undefined for rn == pc). */
6578 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6579 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6580
6581 case 0x2:
6582 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6583
6584 case 0x4: case 0x5: case 0x6: case 0x7:
6585 /* ldc/ldc2 lit (undefined for rn != pc). */
6586 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6587 : arm_copy_undef (gdbarch, insn, dsc);
6588
6589 default:
6590 return arm_copy_undef (gdbarch, insn, dsc);
6591 }
6592 }
6593
6594 case 0xa:
6595 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6596
6597 case 0xb:
6598 if (bits (insn, 16, 19) == 0xf)
6599 /* ldc/ldc2 lit. */
6600 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6601 else
6602 return arm_copy_undef (gdbarch, insn, dsc);
6603
6604 case 0xc:
6605 if (bit (insn, 4))
6606 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6607 else
6608 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6609
6610 case 0xd:
6611 if (bit (insn, 4))
6612 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6613 else
6614 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6615
6616 default:
6617 return arm_copy_undef (gdbarch, insn, dsc);
6618 }
6619 }
6620
6621 /* Decode miscellaneous instructions in dp/misc encoding space. */
6622
6623 static int
6624 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6625 struct regcache *regs,
6626 arm_displaced_step_copy_insn_closure *dsc)
6627 {
6628 unsigned int op2 = bits (insn, 4, 6);
6629 unsigned int op = bits (insn, 21, 22);
6630
6631 switch (op2)
6632 {
6633 case 0x0:
6634 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6635
6636 case 0x1:
6637 if (op == 0x1) /* bx. */
6638 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6639 else if (op == 0x3)
6640 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6641 else
6642 return arm_copy_undef (gdbarch, insn, dsc);
6643
6644 case 0x2:
6645 if (op == 0x1)
6646 /* Not really supported. */
6647 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6648 else
6649 return arm_copy_undef (gdbarch, insn, dsc);
6650
6651 case 0x3:
6652 if (op == 0x1)
6653 return arm_copy_bx_blx_reg (gdbarch, insn,
6654 regs, dsc); /* blx register. */
6655 else
6656 return arm_copy_undef (gdbarch, insn, dsc);
6657
6658 case 0x5:
6659 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6660
6661 case 0x7:
6662 if (op == 0x1)
6663 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6664 else if (op == 0x3)
6665 /* Not really supported. */
6666 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6667 /* Fall through. */
6668
6669 default:
6670 return arm_copy_undef (gdbarch, insn, dsc);
6671 }
6672 }
6673
6674 static int
6675 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6676 struct regcache *regs,
6677 arm_displaced_step_copy_insn_closure *dsc)
6678 {
6679 if (bit (insn, 25))
6680 switch (bits (insn, 20, 24))
6681 {
6682 case 0x10:
6683 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6684
6685 case 0x14:
6686 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6687
6688 case 0x12: case 0x16:
6689 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6690
6691 default:
6692 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6693 }
6694 else
6695 {
6696 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6697
6698 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6699 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6700 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6701 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6702 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6703 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6704 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6705 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6706 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6707 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6708 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6709 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6710 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6711 /* 2nd arg means "unprivileged". */
6712 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6713 dsc);
6714 }
6715
6716 /* Should be unreachable. */
6717 return 1;
6718 }
6719
6720 static int
6721 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6722 struct regcache *regs,
6723 arm_displaced_step_copy_insn_closure *dsc)
6724 {
6725 int a = bit (insn, 25), b = bit (insn, 4);
6726 uint32_t op1 = bits (insn, 20, 24);
6727
6728 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6729 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6730 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6731 else if ((!a && (op1 & 0x17) == 0x02)
6732 || (a && (op1 & 0x17) == 0x02 && !b))
6733 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6734 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6735 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6736 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6737 else if ((!a && (op1 & 0x17) == 0x03)
6738 || (a && (op1 & 0x17) == 0x03 && !b))
6739 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6740 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6741 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6742 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6743 else if ((!a && (op1 & 0x17) == 0x06)
6744 || (a && (op1 & 0x17) == 0x06 && !b))
6745 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6746 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6747 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6748 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6749 else if ((!a && (op1 & 0x17) == 0x07)
6750 || (a && (op1 & 0x17) == 0x07 && !b))
6751 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6752
6753 /* Should be unreachable. */
6754 return 1;
6755 }
6756
6757 static int
6758 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6759 arm_displaced_step_copy_insn_closure *dsc)
6760 {
6761 switch (bits (insn, 20, 24))
6762 {
6763 case 0x00: case 0x01: case 0x02: case 0x03:
6764 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6765
6766 case 0x04: case 0x05: case 0x06: case 0x07:
6767 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6768
6769 case 0x08: case 0x09: case 0x0a: case 0x0b:
6770 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6771 return arm_copy_unmodified (gdbarch, insn,
6772 "decode/pack/unpack/saturate/reverse", dsc);
6773
6774 case 0x18:
6775 if (bits (insn, 5, 7) == 0) /* op2. */
6776 {
6777 if (bits (insn, 12, 15) == 0xf)
6778 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6779 else
6780 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6781 }
6782 else
6783 return arm_copy_undef (gdbarch, insn, dsc);
6784
6785 case 0x1a: case 0x1b:
6786 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6787 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6788 else
6789 return arm_copy_undef (gdbarch, insn, dsc);
6790
6791 case 0x1c: case 0x1d:
6792 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6793 {
6794 if (bits (insn, 0, 3) == 0xf)
6795 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6796 else
6797 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6798 }
6799 else
6800 return arm_copy_undef (gdbarch, insn, dsc);
6801
6802 case 0x1e: case 0x1f:
6803 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6804 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6805 else
6806 return arm_copy_undef (gdbarch, insn, dsc);
6807 }
6808
6809 /* Should be unreachable. */
6810 return 1;
6811 }
6812
6813 static int
6814 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6815 struct regcache *regs,
6816 arm_displaced_step_copy_insn_closure *dsc)
6817 {
6818 if (bit (insn, 25))
6819 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6820 else
6821 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6822 }
6823
6824 static int
6825 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6826 struct regcache *regs,
6827 arm_displaced_step_copy_insn_closure *dsc)
6828 {
6829 unsigned int opcode = bits (insn, 20, 24);
6830
6831 switch (opcode)
6832 {
6833 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6834 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6835
6836 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6837 case 0x12: case 0x16:
6838 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6839
6840 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6841 case 0x13: case 0x17:
6842 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6843
6844 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6845 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6846 /* Note: no writeback for these instructions. Bit 25 will always be
6847 zero though (via caller), so the following works OK. */
6848 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6849 }
6850
6851 /* Should be unreachable. */
6852 return 1;
6853 }
6854
6855 /* Decode shifted register instructions. */
6856
6857 static int
6858 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6859 uint16_t insn2, struct regcache *regs,
6860 arm_displaced_step_copy_insn_closure *dsc)
6861 {
6862   /* The PC is only allowed to be used in the MOV instruction.  */
6863
6864 unsigned int op = bits (insn1, 5, 8);
6865 unsigned int rn = bits (insn1, 0, 3);
6866
6867 if (op == 0x2 && rn == 0xf) /* MOV */
6868 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6869 else
6870 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6871 "dp (shift reg)", dsc);
6872 }
6873
6874
6875 /* Decode extension register load/store. Exactly the same as
6876 arm_decode_ext_reg_ld_st. */
6877
6878 static int
6879 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6880 uint16_t insn2, struct regcache *regs,
6881 arm_displaced_step_copy_insn_closure *dsc)
6882 {
6883 unsigned int opcode = bits (insn1, 4, 8);
6884
6885 switch (opcode)
6886 {
6887 case 0x04: case 0x05:
6888 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6889 "vfp/neon vmov", dsc);
6890
6891 case 0x08: case 0x0c: /* 01x00 */
6892 case 0x0a: case 0x0e: /* 01x10 */
6893 case 0x12: case 0x16: /* 10x10 */
6894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6895 "vfp/neon vstm/vpush", dsc);
6896
6897 case 0x09: case 0x0d: /* 01x01 */
6898 case 0x0b: case 0x0f: /* 01x11 */
6899 case 0x13: case 0x17: /* 10x11 */
6900 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6901 "vfp/neon vldm/vpop", dsc);
6902
6903 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6905 "vstr", dsc);
6906 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6907 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6908 }
6909
6910 /* Should be unreachable. */
6911 return 1;
6912 }
6913
6914 static int
6915 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6916 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6917 {
6918 unsigned int op1 = bits (insn, 20, 25);
6919 int op = bit (insn, 4);
6920 unsigned int coproc = bits (insn, 8, 11);
6921
6922 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6923 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6924 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6925 && (coproc & 0xe) != 0xa)
6926 /* stc/stc2. */
6927 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6928 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6929 && (coproc & 0xe) != 0xa)
6930 /* ldc/ldc2 imm/lit. */
6931 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6932 else if ((op1 & 0x3e) == 0x00)
6933 return arm_copy_undef (gdbarch, insn, dsc);
6934 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6935 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6936 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6937 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6938 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6939 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6940 else if ((op1 & 0x30) == 0x20 && !op)
6941 {
6942 if ((coproc & 0xe) == 0xa)
6943 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6944 else
6945 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6946 }
6947 else if ((op1 & 0x30) == 0x20 && op)
6948 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6949 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6950 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6951 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6952 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6953 else if ((op1 & 0x30) == 0x30)
6954 return arm_copy_svc (gdbarch, insn, regs, dsc);
6955 else
6956 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6957 }
6958
6959 static int
6960 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6961 uint16_t insn2, struct regcache *regs,
6962 arm_displaced_step_copy_insn_closure *dsc)
6963 {
6964 unsigned int coproc = bits (insn2, 8, 11);
6965 unsigned int bit_5_8 = bits (insn1, 5, 8);
6966 unsigned int bit_9 = bit (insn1, 9);
6967 unsigned int bit_4 = bit (insn1, 4);
6968
6969 if (bit_9 == 0)
6970 {
6971 if (bit_5_8 == 2)
6972 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6973 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6974 dsc);
6975 else if (bit_5_8 == 0) /* UNDEFINED. */
6976 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6977 else
6978 {
6979	  /* coproc is 101x: SIMD/VFP, extension registers load/store.  */
6980 if ((coproc & 0xe) == 0xa)
6981 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6982 dsc);
6983 else /* coproc is not 101x. */
6984 {
6985 if (bit_4 == 0) /* STC/STC2. */
6986 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6987 "stc/stc2", dsc);
6988 else /* LDC/LDC2 {literal, immediate}. */
6989 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6990 regs, dsc);
6991 }
6992 }
6993 }
6994 else
6995 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6996
6997 return 0;
6998 }
6999
7000 static void
7001 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7002 arm_displaced_step_copy_insn_closure *dsc, int rd)
7003 {
7004 /* ADR Rd, #imm
7005
7006 Rewrite as:
7007
7008 Preparation: Rd <- PC
7009 Insn: ADD Rd, #imm
7010 Cleanup: Null.
7011 */
7012
7013 /* Rd <- PC */
7014 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7015 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7016 }
7017
7018 static int
7019 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7020 arm_displaced_step_copy_insn_closure *dsc,
7021 int rd, unsigned int imm)
7022 {
7023
7024 /* Encoding T2: ADDS Rd, #imm */
7025 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7026
7027 install_pc_relative (gdbarch, regs, dsc, rd);
7028
7029 return 0;
7030 }
7031
7032 static int
7033 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7034 struct regcache *regs,
7035 arm_displaced_step_copy_insn_closure *dsc)
7036 {
7037 unsigned int rd = bits (insn, 8, 10);
7038 unsigned int imm8 = bits (insn, 0, 7);
7039
7040 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7041 rd, imm8, insn);
7042
7043 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7044 }
7045
7046 static int
7047 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7048 uint16_t insn2, struct regcache *regs,
7049 arm_displaced_step_copy_insn_closure *dsc)
7050 {
7051 unsigned int rd = bits (insn2, 8, 11);
7052   /* The immediate has the same encoding in the ADD and SUB forms of ADR, so
7053      extract the raw immediate encoding rather than computing its value.  When
7054      generating the ADD or SUB instruction, the immediate can simply be ORed
7055      back in.  */
7056 unsigned int imm_3_8 = insn2 & 0x70ff;
7057 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7058
7059 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7060 rd, imm_i, imm_3_8, insn1, insn2);
7061
7062   if (bit (insn1, 7)) /* ADR encoding T2 (SUB form).  */
7063 {
7064       /* SUB (immediate) encoding T3: SUB Rd, Rd, #imm */
7065 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7066 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7067 }
7068   else /* ADR encoding T3 (ADD form).  */
7069     {
7070       /* ADD (immediate) encoding T3: ADD Rd, Rd, #imm */
7071 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7072 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7073 }
7074 dsc->numinsns = 2;
7075
7076 install_pc_relative (gdbarch, regs, dsc, rd);
7077
7078 return 0;
7079 }
7080
7081 static int
7082 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7083 struct regcache *regs,
7084 arm_displaced_step_copy_insn_closure *dsc)
7085 {
7086 unsigned int rt = bits (insn1, 8, 10);
7087 unsigned int pc;
7088 int imm8 = (bits (insn1, 0, 7) << 2);
7089
7090 /* LDR Rd, #imm8
7091
7092      Rewrite as:
7093
7094 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7095
7096 Insn: LDR R0, [R2, R3];
7097 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7098
7099 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7100
7101 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7102 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7103 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7104 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7105 /* The assembler calculates the required value of the offset from the
7106 Align(PC,4) value of this instruction to the label. */
7107 pc = pc & 0xfffffffc;
7108
7109 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7110 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7111
7112 dsc->rd = rt;
7113 dsc->u.ldst.xfersize = 4;
7114 dsc->u.ldst.rn = 0;
7115 dsc->u.ldst.immed = 0;
7116 dsc->u.ldst.writeback = 0;
7117 dsc->u.ldst.restore_r4 = 0;
7118
7119 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7120
7121 dsc->cleanup = &cleanup_load;
7122
7123 return 0;
7124 }
7125
7126 /* Copy Thumb cbnz/cbz instruction. */
7127
7128 static int
7129 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7130 struct regcache *regs,
7131 arm_displaced_step_copy_insn_closure *dsc)
7132 {
7133 int non_zero = bit (insn1, 11);
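  /* The branch offset is encoded as i:imm5:'0', a 7-bit unsigned offset in
     bytes.  */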
7134 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7135 CORE_ADDR from = dsc->insn_addr;
7136 int rn = bits (insn1, 0, 2);
7137 int rn_val = displaced_read_reg (regs, dsc, rn);
7138
7139 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7140   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7141      true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7142      otherwise leave it false and cleanup_branch will do nothing.  */
7143 if (dsc->u.branch.cond)
7144 {
7145 dsc->u.branch.cond = INST_AL;
7146 dsc->u.branch.dest = from + 4 + imm5;
7147 }
7148 else
7149 dsc->u.branch.dest = from + 2;
7150
7151 dsc->u.branch.link = 0;
7152 dsc->u.branch.exchange = 0;
7153
7154 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7155 non_zero ? "cbnz" : "cbz",
7156 rn, rn_val, insn1, dsc->u.branch.dest);
7157
7158 dsc->modinsn[0] = THUMB_NOP;
7159
7160 dsc->cleanup = &cleanup_branch;
7161 return 0;
7162 }
7163
7164 /* Copy Table Branch Byte/Halfword */
7165 static int
7166 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7167 uint16_t insn2, struct regcache *regs,
7168 arm_displaced_step_copy_insn_closure *dsc)
7169 {
7170 ULONGEST rn_val, rm_val;
7171 int is_tbh = bit (insn2, 4);
7172 CORE_ADDR halfwords = 0;
7173 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7174
7175 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7176 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7177
7178 if (is_tbh)
7179 {
7180 gdb_byte buf[2];
7181
7182 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7183 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7184 }
7185 else
7186 {
7187 gdb_byte buf[1];
7188
7189 target_read_memory (rn_val + rm_val, buf, 1);
7190 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7191 }
7192
7193 displaced_debug_printf ("%s base 0x%x offset 0x%x halfwords 0x%x",
7194 is_tbh ? "tbh" : "tbb",
7195 (unsigned int) rn_val, (unsigned int) rm_val,
7196 (unsigned int) halfwords);
7197
7198 dsc->u.branch.cond = INST_AL;
7199 dsc->u.branch.link = 0;
7200 dsc->u.branch.exchange = 0;
7201 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7202
7203 dsc->cleanup = &cleanup_branch;
7204
7205 return 0;
7206 }
7207
7208 static void
7209 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7210 arm_displaced_step_copy_insn_closure *dsc)
7211 {
7212 /* PC <- r7 */
7213 int val = displaced_read_reg (regs, dsc, 7);
7214 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7215
7216 /* r7 <- r8 */
7217 val = displaced_read_reg (regs, dsc, 8);
7218 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7219
7220 /* r8 <- tmp[0] */
7221 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7222
7223 }
7224
7225 static int
7226 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7227 struct regcache *regs,
7228 arm_displaced_step_copy_insn_closure *dsc)
7229 {
7230 dsc->u.block.regmask = insn1 & 0x00ff;
7231
7232 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7233 to:
7234
7235 (1) register list is full, that is, r0-r7 are used.
7236 Prepare: tmp[0] <- r8
7237
7238 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7239 MOV r8, r7; Move value of r7 to r8;
7240 POP {r7}; Store PC value into r7.
7241
7242 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7243
7244 (2) register list is not full, supposing there are N registers in
7245 register list (except PC, 0 <= N <= 7).
7246 Prepare: for each i, 0 - N, tmp[i] <- ri.
7247
7248 POP {r0, r1, ...., rN};
7249
7250 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7251 from tmp[] properly.
7252 */
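/* A worked illustration (hypothetical encoding): insn1 = 0xbd05 is
   POP {r0, r2, pc}, so regmask is 0x05 and case (2) applies with
   num_in_list = 2.  The copied instruction becomes
   (0xbd05 & ~0x1ff) | 0x07 = 0xbc07, i.e. POP {r0, r1, r2}, and
   cleanup_block_load_pc then distributes r0, r1 and r2 back to r0, r2
   and the PC according to the original register mask (with bit 15 set).  */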
7253 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7254 dsc->u.block.regmask, insn1);
7255
7256 if (dsc->u.block.regmask == 0xff)
7257 {
7258 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7259
7260 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7261 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7262 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7263
7264 dsc->numinsns = 3;
7265 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7266 }
7267 else
7268 {
7269 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7270 unsigned int i;
7271 unsigned int new_regmask;
7272
7273 for (i = 0; i < num_in_list + 1; i++)
7274 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7275
7276 new_regmask = (1 << (num_in_list + 1)) - 1;
7277
7278 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7279 "modified list %.4x",
7280 (int) dsc->u.block.regmask, new_regmask);
7281
7282 dsc->u.block.regmask |= 0x8000;
7283 dsc->u.block.writeback = 0;
7284 dsc->u.block.cond = INST_AL;
7285
7286 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7287
7288 dsc->cleanup = &cleanup_block_load_pc;
7289 }
7290
7291 return 0;
7292 }
7293
7294 static void
7295 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7296 struct regcache *regs,
7297 arm_displaced_step_copy_insn_closure *dsc)
7298 {
7299 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7300 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7301 int err = 0;
7302
7303 /* 16-bit thumb instructions. */
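/* For example (hypothetical encoding), insn1 = 0xbd05 (POP {r0, r2, pc})
   has bits 12-15 == 11 and bits 8-11 == 13, so it is routed to the
   "Misc 16-bit instructions" case and, because bit 8 is set (PC in the
   register list), on to thumb_copy_pop_pc_16bit.  */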
7304 switch (op_bit_12_15)
7305 {
7306 /* Shift (immediate), add, subtract, move and compare. */
7307 case 0: case 1: case 2: case 3:
7308 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7309 "shift/add/sub/mov/cmp",
7310 dsc);
7311 break;
7312 case 4:
7313 switch (op_bit_10_11)
7314 {
7315 case 0: /* Data-processing */
7316 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7317 "data-processing",
7318 dsc);
7319 break;
7320 case 1: /* Special data instructions and branch and exchange. */
7321 {
7322 unsigned short op = bits (insn1, 7, 9);
7323 if (op == 6 || op == 7) /* BX or BLX */
7324 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7325 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7326 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7327 else
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7329 dsc);
7330 }
7331 break;
7332 default: /* LDR (literal) */
7333 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7334 }
7335 break;
7336 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7337 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7338 break;
7339 case 10:
7340 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7341 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7342 else /* Generate SP-relative address */
7343 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7344 break;
7345 case 11: /* Misc 16-bit instructions */
7346 {
7347 switch (bits (insn1, 8, 11))
7348 {
7349 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7350 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7351 break;
7352 case 12: case 13: /* POP */
7353 if (bit (insn1, 8)) /* PC is in register list. */
7354 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7355 else
7356 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7357 break;
7358 case 15: /* If-Then, and hints */
7359 if (bits (insn1, 0, 3))
7360 /* If-Then makes up to four following instructions conditional. The
7361 IT instruction itself is not conditional, so handle it as an
7362 ordinary unmodified instruction. */
7363 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7364 dsc);
7365 else
7366 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7367 break;
7368 default:
7369 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7370 }
7371 }
7372 break;
7373 case 12:
7374 if (op_bit_10_11 < 2) /* Store multiple registers */
7375 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7376 else /* Load multiple registers */
7377 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7378 break;
7379 case 13: /* Conditional branch and supervisor call */
7380 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7381 err = thumb_copy_b (gdbarch, insn1, dsc);
7382 else
7383 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7384 break;
7385 case 14: /* Unconditional branch */
7386 err = thumb_copy_b (gdbarch, insn1, dsc);
7387 break;
7388 default:
7389 err = 1;
7390 }
7391
7392 if (err)
7393 internal_error (__FILE__, __LINE__,
7394 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7395 }
7396
7397 static int
7398 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7399 uint16_t insn1, uint16_t insn2,
7400 struct regcache *regs,
7401 arm_displaced_step_copy_insn_closure *dsc)
7402 {
7403 int rt = bits (insn2, 12, 15);
7404 int rn = bits (insn1, 0, 3);
7405 int op1 = bits (insn1, 7, 8);
7406
7407 switch (bits (insn1, 5, 6))
7408 {
7409 case 0: /* Load byte and memory hints */
7410 if (rt == 0xf) /* PLD/PLI */
7411 {
7412 if (rn == 0xf)
7413 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7414 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7415 else
7416 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 "pli/pld", dsc);
7418 }
7419 else
7420 {
7421 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7422 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7423 1);
7424 else
7425 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "ldrb{reg, immediate}/ldrbt",
7427 dsc);
7428 }
7429
7430 break;
7431 case 1: /* Load halfword and memory hints. */
7432 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7433 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 "pld/unalloc memhint", dsc);
7435 else
7436 {
7437 if (rn == 0xf)
7438 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7439 2);
7440 else
7441 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 "ldrh/ldrht", dsc);
7443 }
7444 break;
7445 case 2: /* Load word */
7446 {
7447 int insn2_bit_8_11 = bits (insn2, 8, 11);
7448
7449 if (rn == 0xf)
7450 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7451 else if (op1 == 0x1) /* Encoding T3 */
7452 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7453 0, 1);
7454 else /* op1 == 0x0 */
7455 {
7456 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7457 /* LDR (immediate) */
7458 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7459 dsc, bit (insn2, 8), 1);
7460 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7461 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7462 "ldrt", dsc);
7463 else
7464 /* LDR (register) */
7465 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7466 dsc, 0, 0);
7467 }
7468 break;
7469 }
7470 default:
7471 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7472 break;
7473 }
7474 return 0;
7475 }
7476
7477 static void
7478 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7479 uint16_t insn2, struct regcache *regs,
7480 arm_displaced_step_copy_insn_closure *dsc)
7481 {
7482 int err = 0;
7483 unsigned short op = bit (insn2, 15);
7484 unsigned int op1 = bits (insn1, 11, 12);
7485
7486 switch (op1)
7487 {
7488 case 1:
7489 {
7490 switch (bits (insn1, 9, 10))
7491 {
7492 case 0:
7493 if (bit (insn1, 6))
7494 {
7495 /* Load/store {dual, exclusive}, table branch. */
7496 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7497 && bits (insn2, 5, 7) == 0)
7498 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7499 dsc);
7500 else
7501 /* The PC is not allowed to be used in load/store {dual, exclusive}
7502 instructions. */
7503 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7504 "load/store dual/ex", dsc);
7505 }
7506 else /* load/store multiple */
7507 {
7508 switch (bits (insn1, 7, 8))
7509 {
7510 case 0: case 3: /* SRS, RFE */
7511 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7512 "srs/rfe", dsc);
7513 break;
7514 case 1: case 2: /* LDM/STM/PUSH/POP */
7515 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7516 break;
7517 }
7518 }
7519 break;
7520
7521 case 1:
7522 /* Data-processing (shift register). */
7523 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7524 dsc);
7525 break;
7526 default: /* Coprocessor instructions. */
7527 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7528 break;
7529 }
7530 break;
7531 }
7532 case 2: /* op1 = 2 */
7533 if (op) /* Branch and misc control. */
7534 {
7535 if (bit (insn2, 14) /* BLX/BL */
7536 || bit (insn2, 12) /* Unconditional branch */
7537 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7538 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7539 else
7540 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7541 "misc ctrl", dsc);
7542 }
7543 else
7544 {
7545 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7546 {
7547 int dp_op = bits (insn1, 4, 8);
7548 int rn = bits (insn1, 0, 3);
7549 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7550 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7551 regs, dsc);
7552 else
7553 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7554 "dp/pb", dsc);
7555 }
7556 else /* Data processing (modified immediate) */
7557 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7558 "dp/mi", dsc);
7559 }
7560 break;
7561 case 3: /* op1 = 3 */
7562 switch (bits (insn1, 9, 10))
7563 {
7564 case 0:
7565 if (bit (insn1, 4))
7566 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7567 regs, dsc);
7568 else /* NEON Load/Store and Store single data item */
7569 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7570 "neon elt/struct load/store",
7571 dsc);
7572 break;
7573 case 1: /* op1 = 3, bits (9, 10) == 1 */
7574 switch (bits (insn1, 7, 8))
7575 {
7576 case 0: case 1: /* Data processing (register) */
7577 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7578 "dp(reg)", dsc);
7579 break;
7580 case 2: /* Multiply and absolute difference */
7581 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7582 "mul/mua/diff", dsc);
7583 break;
7584 case 3: /* Long multiply and divide */
7585 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7586 "lmul/lmua", dsc);
7587 break;
7588 }
7589 break;
7590 default: /* Coprocessor instructions */
7591 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7592 break;
7593 }
7594 break;
7595 default:
7596 err = 1;
7597 }
7598
7599 if (err)
7600 internal_error (__FILE__, __LINE__,
7601 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7602
7603 }
7604
7605 static void
7606 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7607 struct regcache *regs,
7608 arm_displaced_step_copy_insn_closure *dsc)
7609 {
7610 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7611 uint16_t insn1
7612 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7613
7614 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7615 insn1, (unsigned long) from);
7616
7617 dsc->is_thumb = 1;
7618 dsc->insn_size = thumb_insn_size (insn1);
7619 if (thumb_insn_size (insn1) == 4)
7620 {
7621 uint16_t insn2
7622 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7623 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7624 }
7625 else
7626 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7627 }
7628
7629 void
7630 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7631 CORE_ADDR to, struct regcache *regs,
7632 arm_displaced_step_copy_insn_closure *dsc)
7633 {
7634 int err = 0;
7635 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7636 uint32_t insn;
7637
7638 /* Most displaced instructions use a 1-instruction scratch space, so set this
7639 here and override below if/when necessary. */
7640 dsc->numinsns = 1;
7641 dsc->insn_addr = from;
7642 dsc->scratch_base = to;
7643 dsc->cleanup = NULL;
7644 dsc->wrote_to_pc = 0;
7645
7646 if (!displaced_in_arm_mode (regs))
7647 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7648
7649 dsc->is_thumb = 0;
7650 dsc->insn_size = 4;
7651 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7652 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7653 (unsigned long) insn, (unsigned long) from);
7654
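/* For example (hypothetical encoding), insn = 0xe59f1004
   ("ldr r1, [pc, #4]") gives ((insn & 0x10) >> 4) == 0 and
   ((insn & 0xe000000) >> 24) == 0x4, so the dispatch below selects
   arm_decode_ld_st_word_ubyte.  */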
7655 if ((insn & 0xf0000000) == 0xf0000000)
7656 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7657 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7658 {
7659 case 0x0: case 0x1: case 0x2: case 0x3:
7660 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7661 break;
7662
7663 case 0x4: case 0x5: case 0x6:
7664 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7665 break;
7666
7667 case 0x7:
7668 err = arm_decode_media (gdbarch, insn, dsc);
7669 break;
7670
7671 case 0x8: case 0x9: case 0xa: case 0xb:
7672 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7673 break;
7674
7675 case 0xc: case 0xd: case 0xe: case 0xf:
7676 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7677 break;
7678 }
7679
7680 if (err)
7681 internal_error (__FILE__, __LINE__,
7682 _("arm_process_displaced_insn: Instruction decode error"));
7683 }
7684
7685 /* Actually set up the scratch space for a displaced instruction. */
7686
7687 void
7688 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7689 CORE_ADDR to,
7690 arm_displaced_step_copy_insn_closure *dsc)
7691 {
7692 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7693 unsigned int i, len, offset;
7694 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7695 int size = dsc->is_thumb? 2 : 4;
7696 const gdb_byte *bkp_insn;
7697
7698 offset = 0;
7699 /* Poke modified instruction(s). */
7700 for (i = 0; i < dsc->numinsns; i++)
7701 {
7702 if (size == 4)
7703 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7704 dsc->modinsn[i], (unsigned long) to + offset);
7705 else if (size == 2)
7706 displaced_debug_printf ("writing insn %.4x at %.8lx",
7707 (unsigned short) dsc->modinsn[i],
7708 (unsigned long) to + offset);
7709
7710 write_memory_unsigned_integer (to + offset, size,
7711 byte_order_for_code,
7712 dsc->modinsn[i]);
7713 offset += size;
7714 }
7715
7716 /* Choose the correct breakpoint instruction. */
7717 if (dsc->is_thumb)
7718 {
7719 bkp_insn = tdep->thumb_breakpoint;
7720 len = tdep->thumb_breakpoint_size;
7721 }
7722 else
7723 {
7724 bkp_insn = tdep->arm_breakpoint;
7725 len = tdep->arm_breakpoint_size;
7726 }
7727
7728 /* Put breakpoint afterwards. */
7729 write_memory (to + offset, bkp_insn, len);
7730
7731 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
7732 paddress (gdbarch, to));
7733 }
7734
7735 /* Entry point for cleaning things up after a displaced instruction has been
7736 single-stepped. */
7737
7738 void
7739 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7740 struct displaced_step_copy_insn_closure *dsc_,
7741 CORE_ADDR from, CORE_ADDR to,
7742 struct regcache *regs)
7743 {
7744 arm_displaced_step_copy_insn_closure *dsc
7745 = (arm_displaced_step_copy_insn_closure *) dsc_;
7746
7747 if (dsc->cleanup)
7748 dsc->cleanup (gdbarch, regs, dsc);
7749
7750 if (!dsc->wrote_to_pc)
7751 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7752 dsc->insn_addr + dsc->insn_size);
7753
7754 }
7755
7756 #include "bfd-in2.h"
7757 #include "libcoff.h"
7758
7759 static int
7760 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7761 {
7762 gdb_disassembler *di
7763 = static_cast<gdb_disassembler *>(info->application_data);
7764 struct gdbarch *gdbarch = di->arch ();
7765
7766 if (arm_pc_is_thumb (gdbarch, memaddr))
7767 {
7768 static asymbol *asym;
7769 static combined_entry_type ce;
7770 static struct coff_symbol_struct csym;
7771 static struct bfd fake_bfd;
7772 static bfd_target fake_target;
7773
7774 if (csym.native == NULL)
7775 {
7776 /* Create a fake symbol vector containing a Thumb symbol.
7777 This is solely so that the code in print_insn_little_arm()
7778 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7779 the presence of a Thumb symbol and switch to decoding
7780 Thumb instructions. */
7781
7782 fake_target.flavour = bfd_target_coff_flavour;
7783 fake_bfd.xvec = &fake_target;
7784 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7785 csym.native = &ce;
7786 csym.symbol.the_bfd = &fake_bfd;
7787 csym.symbol.name = "fake";
7788 asym = (asymbol *) & csym;
7789 }
7790
7791 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7792 info->symbols = &asym;
7793 }
7794 else
7795 info->symbols = NULL;
7796
7797 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
7798 accurate; set the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7799 opcodes/arm-dis.c:print_insn resets info->mach, which triggers
7800 the assert on the mismatch between info->mach and
7801 bfd_get_mach (current_program_space->exec_bfd ()) in
7802 default_print_insn. */
7803 if (current_program_space->exec_bfd () != NULL
7804 && (current_program_space->exec_bfd ()->arch_info
7805 == gdbarch_bfd_arch_info (gdbarch)))
7806 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7807
7808 return default_print_insn (memaddr, info);
7809 }
7810
7811 /* The following define instruction sequences that will cause ARM
7812 CPUs to take an undefined instruction trap. These are used to
7813 signal a breakpoint to GDB.
7814
7815 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7816 mode. A different instruction is required for each mode. The ARM
7817 CPUs can also be big or little endian. Thus four different
7818 instructions are needed to support all cases.
7819
7820 Note: ARMv4 defines several new instructions that will take the
7821 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7822 not in fact add the new instructions. The new undefined
7823 instructions in ARMv4 are all instructions that had no defined
7824 behaviour in earlier chips. There is no guarantee that they will
7825 raise an exception; they may instead be treated as NOPs. In practice,
7826 it may only be safe to rely on instructions matching:
7827
7828 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7829 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7830 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7831
7832 Even this may only be true if the condition predicate is true. The
7833 following use a condition predicate of ALWAYS so it is always TRUE.
7834
7835 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7836 and NetBSD all use a software interrupt rather than an undefined
7837 instruction to force a trap. This can be handled by the
7838 abi-specific code during establishment of the gdbarch vector. */
7839
7840 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7841 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7842 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7843 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7844
7845 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7846 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7847 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7848 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
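/* As a sanity check of the pattern above (shown for illustration): the
   little-endian ARM byte sequence {0xFE,0xDE,0xFF,0xE7} is the word
   0xE7FFDEFE, whose condition field is 0xE (ALWAYS), whose bits 27-25 are
   011 and whose bit 4 is 1, so it matches the undefined instruction
   pattern described above.  The Thumb sequence {0xbe,0xbe} is the
   halfword 0xBEBE.  */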
7849
7850 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7851
7852 static int
7853 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7854 {
7855 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7856 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7857
7858 if (arm_pc_is_thumb (gdbarch, *pcptr))
7859 {
7860 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7861
7862 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7863 check whether we are replacing a 32-bit instruction. */
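      /* For example (hypothetical encoding), a first halfword of 0xf000
	 has its top five bits equal to 0b11110, so thumb_insn_size
	 returns 4 and the Thumb-2 breakpoint is used.  */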
7864 if (tdep->thumb2_breakpoint != NULL)
7865 {
7866 gdb_byte buf[2];
7867
7868 if (target_read_memory (*pcptr, buf, 2) == 0)
7869 {
7870 unsigned short inst1;
7871
7872 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7873 if (thumb_insn_size (inst1) == 4)
7874 return ARM_BP_KIND_THUMB2;
7875 }
7876 }
7877
7878 return ARM_BP_KIND_THUMB;
7879 }
7880 else
7881 return ARM_BP_KIND_ARM;
7882
7883 }
7884
7885 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7886
7887 static const gdb_byte *
7888 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7889 {
7890 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7891
7892 switch (kind)
7893 {
7894 case ARM_BP_KIND_ARM:
7895 *size = tdep->arm_breakpoint_size;
7896 return tdep->arm_breakpoint;
7897 case ARM_BP_KIND_THUMB:
7898 *size = tdep->thumb_breakpoint_size;
7899 return tdep->thumb_breakpoint;
7900 case ARM_BP_KIND_THUMB2:
7901 *size = tdep->thumb2_breakpoint_size;
7902 return tdep->thumb2_breakpoint;
7903 default:
7904 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7905 }
7906 }
7907
7908 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7909
7910 static int
7911 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7912 struct regcache *regcache,
7913 CORE_ADDR *pcptr)
7914 {
7915 gdb_byte buf[4];
7916
7917 /* Check that the memory pointed to by PC is readable. */
7918 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7919 {
7920 struct arm_get_next_pcs next_pcs_ctx;
7921
7922 arm_get_next_pcs_ctor (&next_pcs_ctx,
7923 &arm_get_next_pcs_ops,
7924 gdbarch_byte_order (gdbarch),
7925 gdbarch_byte_order_for_code (gdbarch),
7926 0,
7927 regcache);
7928
7929 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7930
7931 /* If *PCPTR matches one of the next-instruction addresses computed
7932 by the software single-step code, determine the Thumb mode from
7933 that destination address. */
7934 for (CORE_ADDR pc : next_pcs)
7935 {
7936 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7937 {
7938 if (IS_THUMB_ADDR (pc))
7939 {
7940 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7941 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7942 }
7943 else
7944 return ARM_BP_KIND_ARM;
7945 }
7946 }
7947 }
7948
7949 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7950 }
7951
7952 /* Extract from an array REGBUF containing the (raw) register state a
7953 function return value of type TYPE, and copy that, in virtual
7954 format, into VALBUF. */
7955
7956 static void
7957 arm_extract_return_value (struct type *type, struct regcache *regs,
7958 gdb_byte *valbuf)
7959 {
7960 struct gdbarch *gdbarch = regs->arch ();
7961 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7962 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7963
7964 if (TYPE_CODE_FLT == type->code ())
7965 {
7966 switch (tdep->fp_model)
7967 {
7968 case ARM_FLOAT_FPA:
7969 {
7970 /* The value is in register F0 in internal format. We need to
7971 extract the raw value and then convert it to the desired
7972 internal type. */
7973 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7974
7975 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7976 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7977 valbuf, type);
7978 }
7979 break;
7980
7981 case ARM_FLOAT_SOFT_FPA:
7982 case ARM_FLOAT_SOFT_VFP:
7983 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7984 not using the VFP ABI code. */
7985 case ARM_FLOAT_VFP:
7986 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7987 if (TYPE_LENGTH (type) > 4)
7988 regs->cooked_read (ARM_A1_REGNUM + 1,
7989 valbuf + ARM_INT_REGISTER_SIZE);
7990 break;
7991
7992 default:
7993 internal_error (__FILE__, __LINE__,
7994 _("arm_extract_return_value: "
7995 "Floating point model not supported"));
7996 break;
7997 }
7998 }
7999 else if (type->code () == TYPE_CODE_INT
8000 || type->code () == TYPE_CODE_CHAR
8001 || type->code () == TYPE_CODE_BOOL
8002 || type->code () == TYPE_CODE_PTR
8003 || TYPE_IS_REFERENCE (type)
8004 || type->code () == TYPE_CODE_ENUM)
8005 {
8006 /* If the type is a plain integer, then the access is
8007 straightforward. Otherwise we have to play around a bit
8008 more. */
8009 int len = TYPE_LENGTH (type);
8010 int regno = ARM_A1_REGNUM;
8011 ULONGEST tmp;
8012
8013 while (len > 0)
8014 {
8015 /* By using store_unsigned_integer we avoid having to do
8016 anything special for small big-endian values. */
8017 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8018 store_unsigned_integer (valbuf,
8019 (len > ARM_INT_REGISTER_SIZE
8020 ? ARM_INT_REGISTER_SIZE : len),
8021 byte_order, tmp);
8022 len -= ARM_INT_REGISTER_SIZE;
8023 valbuf += ARM_INT_REGISTER_SIZE;
8024 }
8025 }
8026 else
8027 {
8028 /* For a structure or union the behaviour is as if the value had
8029 been stored to word-aligned memory and then loaded into
8030 registers with 32-bit load instruction(s). */
8031 int len = TYPE_LENGTH (type);
8032 int regno = ARM_A1_REGNUM;
8033 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8034
8035 while (len > 0)
8036 {
8037 regs->cooked_read (regno++, tmpbuf);
8038 memcpy (valbuf, tmpbuf,
8039 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8040 len -= ARM_INT_REGISTER_SIZE;
8041 valbuf += ARM_INT_REGISTER_SIZE;
8042 }
8043 }
8044 }
8045
8046
8047 /* Will a function return an aggregate type in memory or in a
8048 register? Return 0 if an aggregate type can be returned in a
8049 register, 1 if it must be returned in memory. */
8050
8051 static int
8052 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8053 {
8054 enum type_code code;
8055
8056 type = check_typedef (type);
8057
8058 /* Simple, non-aggregate types (i.e. not including vectors and
8059 complex) are always returned in a register (or registers). */
8060 code = type->code ();
8061 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8062 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8063 return 0;
8064
8065 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8066 {
8067 /* Vector values should be returned using ARM registers if they
8068 are not over 16 bytes. */
8069 return (TYPE_LENGTH (type) > 16);
8070 }
8071
8072 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8073 if (tdep->arm_abi != ARM_ABI_APCS)
8074 {
8075 /* The AAPCS says all aggregates not larger than a word are returned
8076 in a register. */
8077 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8078 return 0;
8079
8080 return 1;
8081 }
8082 else
8083 {
8084 int nRc;
8085
8086 /* All aggregate types that won't fit in a register must be returned
8087 in memory. */
8088 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8089 return 1;
8090
8091 /* In the ARM ABI, "integer" like aggregate types are returned in
8092 registers. For an aggregate type to be integer like, its size
8093 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8094 offset of each addressable subfield must be zero. Note that bit
8095 fields are not addressable, and all addressable subfields of
8096 unions always start at offset zero.
8097
8098 This function is based on the behaviour of GCC 2.95.1.
8099 See: gcc/arm.c: arm_return_in_memory() for details.
8100
8101 Note: All versions of GCC before GCC 2.95.2 do not set up the
8102 parameters correctly for a function returning the following
8103 structure: struct { float f;}; This should be returned in memory,
8104 not a register. Richard Earnshaw sent me a patch, but I do not
8105 know of any way to detect if a function like the above has been
8106 compiled with the correct calling convention. */
8107
8108 /* Assume all other aggregate types can be returned in a register.
8109 Run a check for structures, unions and arrays. */
8110 nRc = 0;
8111
8112 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8113 {
8114 int i;
8115 /* Need to check if this struct/union is "integer" like. For
8116 this to be true, its size must be less than or equal to
8117 ARM_INT_REGISTER_SIZE and the offset of each addressable
8118 subfield must be zero. Note that bit fields are not
8119 addressable, and unions always start at offset zero. If any
8120 of the subfields is a floating point type, the struct/union
8121 cannot be an integer type. */
8122
8123 /* For each field in the object, check:
8124 1) Is it FP? --> yes, nRc = 1;
8125 2) Is it addressable (bitpos != 0) and
8126 not packed (bitsize == 0)?
8127 --> yes, nRc = 1
8128 */
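	  /* For example (hypothetical types): "struct { char c; }" has a
	     single non-FP field at offset zero, so nRc stays 0 and the
	     value is returned in a register, while "struct { float f; }"
	     trips check (1) and is returned in memory.  */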
8129
8130 for (i = 0; i < type->num_fields (); i++)
8131 {
8132 enum type_code field_type_code;
8133
8134 field_type_code
8135 = check_typedef (type->field (i).type ())->code ();
8136
8137 /* Is it a floating point type field? */
8138 if (field_type_code == TYPE_CODE_FLT)
8139 {
8140 nRc = 1;
8141 break;
8142 }
8143
8144 /* If bitpos != 0, then we have to care about it. */
8145 if (type->field (i).loc_bitpos () != 0)
8146 {
8147 /* Bitfields are not addressable. If the field bitsize is
8148 zero, then the field is not packed. Hence it cannot be
8149 a bitfield or any other packed type. */
8150 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8151 {
8152 nRc = 1;
8153 break;
8154 }
8155 }
8156 }
8157 }
8158
8159 return nRc;
8160 }
8161 }
8162
8163 /* Write into appropriate registers a function return value of type
8164 TYPE, given in virtual format. */
8165
8166 static void
8167 arm_store_return_value (struct type *type, struct regcache *regs,
8168 const gdb_byte *valbuf)
8169 {
8170 struct gdbarch *gdbarch = regs->arch ();
8171 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8172
8173 if (type->code () == TYPE_CODE_FLT)
8174 {
8175 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8176 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8177
8178 switch (tdep->fp_model)
8179 {
8180 case ARM_FLOAT_FPA:
8181
8182 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8183 regs->cooked_write (ARM_F0_REGNUM, buf);
8184 break;
8185
8186 case ARM_FLOAT_SOFT_FPA:
8187 case ARM_FLOAT_SOFT_VFP:
8188 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8189 not using the VFP ABI code. */
8190 case ARM_FLOAT_VFP:
8191 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8192 if (TYPE_LENGTH (type) > 4)
8193 regs->cooked_write (ARM_A1_REGNUM + 1,
8194 valbuf + ARM_INT_REGISTER_SIZE);
8195 break;
8196
8197 default:
8198 internal_error (__FILE__, __LINE__,
8199 _("arm_store_return_value: Floating "
8200 "point model not supported"));
8201 break;
8202 }
8203 }
8204 else if (type->code () == TYPE_CODE_INT
8205 || type->code () == TYPE_CODE_CHAR
8206 || type->code () == TYPE_CODE_BOOL
8207 || type->code () == TYPE_CODE_PTR
8208 || TYPE_IS_REFERENCE (type)
8209 || type->code () == TYPE_CODE_ENUM)
8210 {
8211 if (TYPE_LENGTH (type) <= 4)
8212 {
8213 /* Values of one word or less are zero/sign-extended and
8214 returned in r0. */
8215 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8216 LONGEST val = unpack_long (type, valbuf);
8217
8218 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8219 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8220 }
8221 else
8222 {
8223 /* Integral values greater than one word are stored in consecutive
8224 registers starting with r0. This will always be a multiple of
8225 the register size. */
8226 int len = TYPE_LENGTH (type);
8227 int regno = ARM_A1_REGNUM;
8228
8229 while (len > 0)
8230 {
8231 regs->cooked_write (regno++, valbuf);
8232 len -= ARM_INT_REGISTER_SIZE;
8233 valbuf += ARM_INT_REGISTER_SIZE;
8234 }
8235 }
8236 }
8237 else
8238 {
8239 /* For a structure or union the behaviour is as if the value had
8240 been stored to word-aligned memory and then loaded into
8241 registers with 32-bit load instruction(s). */
8242 int len = TYPE_LENGTH (type);
8243 int regno = ARM_A1_REGNUM;
8244 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8245
8246 while (len > 0)
8247 {
8248 memcpy (tmpbuf, valbuf,
8249 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8250 regs->cooked_write (regno++, tmpbuf);
8251 len -= ARM_INT_REGISTER_SIZE;
8252 valbuf += ARM_INT_REGISTER_SIZE;
8253 }
8254 }
8255 }
8256
8257
8258 /* Handle function return values. */
8259
8260 static enum return_value_convention
8261 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8262 struct type *valtype, struct regcache *regcache,
8263 gdb_byte *readbuf, const gdb_byte *writebuf)
8264 {
8265 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8266 struct type *func_type = function ? value_type (function) : NULL;
8267 enum arm_vfp_cprc_base_type vfp_base_type;
8268 int vfp_base_count;
8269
8270 if (arm_vfp_abi_for_function (gdbarch, func_type)
8271 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8272 {
8273 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8274 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8275 int i;
8276 for (i = 0; i < vfp_base_count; i++)
8277 {
8278 if (reg_char == 'q')
8279 {
8280 if (writebuf)
8281 arm_neon_quad_write (gdbarch, regcache, i,
8282 writebuf + i * unit_length);
8283
8284 if (readbuf)
8285 arm_neon_quad_read (gdbarch, regcache, i,
8286 readbuf + i * unit_length);
8287 }
8288 else
8289 {
8290 char name_buf[4];
8291 int regnum;
8292
8293 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8294 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8295 strlen (name_buf));
8296 if (writebuf)
8297 regcache->cooked_write (regnum, writebuf + i * unit_length);
8298 if (readbuf)
8299 regcache->cooked_read (regnum, readbuf + i * unit_length);
8300 }
8301 }
8302 return RETURN_VALUE_REGISTER_CONVENTION;
8303 }
8304
8305 if (valtype->code () == TYPE_CODE_STRUCT
8306 || valtype->code () == TYPE_CODE_UNION
8307 || valtype->code () == TYPE_CODE_ARRAY)
8308 {
8309 if (tdep->struct_return == pcc_struct_return
8310 || arm_return_in_memory (gdbarch, valtype))
8311 return RETURN_VALUE_STRUCT_CONVENTION;
8312 }
8313 else if (valtype->code () == TYPE_CODE_COMPLEX)
8314 {
8315 if (arm_return_in_memory (gdbarch, valtype))
8316 return RETURN_VALUE_STRUCT_CONVENTION;
8317 }
8318
8319 if (writebuf)
8320 arm_store_return_value (valtype, regcache, writebuf);
8321
8322 if (readbuf)
8323 arm_extract_return_value (valtype, regcache, readbuf);
8324
8325 return RETURN_VALUE_REGISTER_CONVENTION;
8326 }
8327
8328
8329 static int
8330 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8331 {
8332 struct gdbarch *gdbarch = get_frame_arch (frame);
8333 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8334 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8335 CORE_ADDR jb_addr;
8336 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8337
8338 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8339
8340 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8341 ARM_INT_REGISTER_SIZE))
8342 return 0;
8343
8344 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8345 return 1;
8346 }
8347 /* A call to cmse secure entry function "foo" at "a" is modified by
8348 GNU ld as "b".
8349 a) bl xxxx <foo>
8350
8351 <foo>
8352 xxxx:
8353
8354 b) bl yyyy <__acle_se_foo>
8355
8356 section .gnu.sgstubs:
8357 <foo>
8358 yyyy: sg // secure gateway
8359 b.w xxxx <__acle_se_foo> // original_branch_dest
8360
8361 <__acle_se_foo>
8362 xxxx:
8363
8364 When control reaches "b", the pc contains "yyyy" (the sg address), which
8365 is a trampoline and does not exist in the source code. This function
8366 returns the target pc "xxxx". For more details please refer to section 5.4
8367 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8368 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8369 document on www.developer.arm.com. */
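/* For example (hypothetical symbol names): when the PC is inside the sg
   stub for "foo", the function below builds the name "__acle_se_foo",
   looks it up as a minimal symbol and, if found, returns its address as
   the real destination.  */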
8370
8371 static CORE_ADDR
8372 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8373 {
8374 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8375 char *target_name = (char *) alloca (target_len);
8376 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8377
8378 struct bound_minimal_symbol minsym
8379 = lookup_minimal_symbol (target_name, NULL, objfile);
8380
8381 if (minsym.minsym != nullptr)
8382 return BMSYMBOL_VALUE_ADDRESS (minsym);
8383
8384 return 0;
8385 }
8386
8387 /* Return true when SEC points to ".gnu.sgstubs" section. */
8388
8389 static bool
8390 arm_is_sgstubs_section (struct obj_section *sec)
8391 {
8392 return (sec != nullptr
8393 && sec->the_bfd_section != nullptr
8394 && sec->the_bfd_section->name != nullptr
8395 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8396 }
8397
8398 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8399 return the target PC. Otherwise return 0. */
8400
8401 CORE_ADDR
8402 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8403 {
8404 const char *name;
8405 int namelen;
8406 CORE_ADDR start_addr;
8407
8408 /* Find the starting address and name of the function containing the PC. */
8409 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8410 {
8411 /* The trampoline 'bx reg' doesn't belong to any function. Do the
8412 check here. */
8413 start_addr = arm_skip_bx_reg (frame, pc);
8414 if (start_addr != 0)
8415 return start_addr;
8416
8417 return 0;
8418 }
8419
8420 /* If PC is in a Thumb call or return stub, return the address of the
8421 target PC, which is in a register. The thunk functions are called
8422 _call_via_xx, where xx is the register name. The possible names
8423 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8424 functions, named __ARM_call_via_r[0-7]. */
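      /* For example (hypothetical symbol name): for "_call_via_ip" the
	 two-character suffix "ip" matches index 12 of the table below, so
	 the target PC is read from the IP register of FRAME.  */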
8425 if (startswith (name, "_call_via_")
8426 || startswith (name, "__ARM_call_via_"))
8427 {
8428 /* Use the name suffix to determine which register contains the
8429 target PC. */
8430 static const char *table[15] =
8431 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8432 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8433 };
8434 int regno;
8435 int offset = strlen (name) - 2;
8436
8437 for (regno = 0; regno <= 14; regno++)
8438 if (strcmp (&name[offset], table[regno]) == 0)
8439 return get_frame_register_unsigned (frame, regno);
8440 }
8441
8442 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8443 non-interworking calls to foo. We could decode the stubs
8444 to find the target but it's easier to use the symbol table. */
8445 namelen = strlen (name);
8446 if (name[0] == '_' && name[1] == '_'
8447 && ((namelen > 2 + strlen ("_from_thumb")
8448 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8449 || (namelen > 2 + strlen ("_from_arm")
8450 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8451 {
8452 char *target_name;
8453 int target_len = namelen - 2;
8454 struct bound_minimal_symbol minsym;
8455 struct objfile *objfile;
8456 struct obj_section *sec;
8457
8458 if (name[namelen - 1] == 'b')
8459 target_len -= strlen ("_from_thumb");
8460 else
8461 target_len -= strlen ("_from_arm");
8462
8463 target_name = (char *) alloca (target_len + 1);
8464 memcpy (target_name, name + 2, target_len);
8465 target_name[target_len] = '\0';
8466
8467 sec = find_pc_section (pc);
8468 objfile = (sec == NULL) ? NULL : sec->objfile;
8469 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8470 if (minsym.minsym != NULL)
8471 return BMSYMBOL_VALUE_ADDRESS (minsym);
8472 else
8473 return 0;
8474 }
8475
8476 struct obj_section *section = find_pc_section (pc);
8477
8478 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8479 if (arm_is_sgstubs_section (section))
8480 return arm_skip_cmse_entry (pc, name, section->objfile);
8481
8482 return 0; /* not a stub */
8483 }
8484
8485 static void
8486 arm_update_current_architecture (void)
8487 {
8488 /* If the current architecture is not ARM, we have nothing to do. */
8489 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8490 return;
8491
8492 /* Update the architecture. */
8493 gdbarch_info info;
8494 if (!gdbarch_update_p (info))
8495 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8496 }
8497
8498 static void
8499 set_fp_model_sfunc (const char *args, int from_tty,
8500 struct cmd_list_element *c)
8501 {
8502 int fp_model;
8503
8504 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8505 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8506 {
8507 arm_fp_model = (enum arm_float_model) fp_model;
8508 break;
8509 }
8510
8511 if (fp_model == ARM_FLOAT_LAST)
8512 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8513 current_fp_model);
8514
8515 arm_update_current_architecture ();
8516 }
8517
8518 static void
8519 show_fp_model (struct ui_file *file, int from_tty,
8520 struct cmd_list_element *c, const char *value)
8521 {
8522 arm_gdbarch_tdep *tdep
8523 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
8524
8525 if (arm_fp_model == ARM_FLOAT_AUTO
8526 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8527 fprintf_filtered (file, _("\
8528 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8529 fp_model_strings[tdep->fp_model]);
8530 else
8531 fprintf_filtered (file, _("\
8532 The current ARM floating point model is \"%s\".\n"),
8533 fp_model_strings[arm_fp_model]);
8534 }
8535
8536 static void
8537 arm_set_abi (const char *args, int from_tty,
8538 struct cmd_list_element *c)
8539 {
8540 int arm_abi;
8541
8542 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8543 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8544 {
8545 arm_abi_global = (enum arm_abi_kind) arm_abi;
8546 break;
8547 }
8548
8549 if (arm_abi == ARM_ABI_LAST)
8550 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8551 arm_abi_string);
8552
8553 arm_update_current_architecture ();
8554 }
8555
8556 static void
8557 arm_show_abi (struct ui_file *file, int from_tty,
8558 struct cmd_list_element *c, const char *value)
8559 {
8560 arm_gdbarch_tdep *tdep
8561 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
8562
8563 if (arm_abi_global == ARM_ABI_AUTO
8564 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8565 fprintf_filtered (file, _("\
8566 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8567 arm_abi_strings[tdep->arm_abi]);
8568 else
8569 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8570 arm_abi_string);
8571 }
8572
8573 static void
8574 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8575 struct cmd_list_element *c, const char *value)
8576 {
8577 fprintf_filtered (file,
8578 _("The current execution mode assumed "
8579 "(when symbols are unavailable) is \"%s\".\n"),
8580 arm_fallback_mode_string);
8581 }
8582
8583 static void
8584 arm_show_force_mode (struct ui_file *file, int from_tty,
8585 struct cmd_list_element *c, const char *value)
8586 {
8587 fprintf_filtered (file,
8588 _("The current execution mode assumed "
8589 "(even when symbols are available) is \"%s\".\n"),
8590 arm_force_mode_string);
8591 }
8592
8593 /* If the user changes the register disassembly style used for info
8594 register and other commands, we have to also switch the style used
8595 in opcodes for disassembly output. This function is run by the "set
8596 arm disassembly" command, and does that. */
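/* For example (assuming the opcodes library accepts that style), a style
   of "apcs" is turned into the option string "reg-names-apcs" before being
   passed to set_disassembler_options below.  */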
8597
8598 static void
8599 set_disassembly_style_sfunc (const char *args, int from_tty,
8600 struct cmd_list_element *c)
8601 {
8602 /* Convert the short style name into the long style name (eg, reg-names-*)
8603 before calling the generic set_disassembler_options() function. */
8604 std::string long_name = std::string ("reg-names-") + disassembly_style;
8605 set_disassembler_options (&long_name[0]);
8606 }
8607
8608 static void
8609 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8610 struct cmd_list_element *c, const char *value)
8611 {
8612 struct gdbarch *gdbarch = get_current_arch ();
8613 char *options = get_disassembler_options (gdbarch);
8614 const char *style = "";
8615 int len = 0;
8616 const char *opt;
8617
8618 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8619 if (startswith (opt, "reg-names-"))
8620 {
8621 style = &opt[strlen ("reg-names-")];
8622 len = strcspn (style, ",");
8623 }
8624
8625 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8626 }
8627 \f
8628 /* Return the ARM register name corresponding to register I. */
8629 static const char *
8630 arm_register_name (struct gdbarch *gdbarch, int i)
8631 {
8632 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8633
8634 if (is_s_pseudo (gdbarch, i))
8635 {
8636 static const char *const s_pseudo_names[] = {
8637 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8638 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8639 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8640 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8641 };
8642
8643 return s_pseudo_names[i - tdep->s_pseudo_base];
8644 }
8645
8646 if (is_q_pseudo (gdbarch, i))
8647 {
8648 static const char *const q_pseudo_names[] = {
8649 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8650 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8651 };
8652
8653 return q_pseudo_names[i - tdep->q_pseudo_base];
8654 }
8655
8656 if (is_mve_pseudo (gdbarch, i))
8657 return "p0";
8658
8659 if (i >= ARRAY_SIZE (arm_register_names))
8660 /* These registers are only supported on targets which supply
8661 an XML description. */
8662 return "";
8663
8664 /* Non-pseudo registers. */
8665 return arm_register_names[i];
8666 }
8667
8668 /* Test whether the coff symbol specific value corresponds to a Thumb
8669 function. */
8670
8671 static int
8672 coff_sym_is_thumb (int val)
8673 {
8674 return (val == C_THUMBEXT
8675 || val == C_THUMBSTAT
8676 || val == C_THUMBEXTFUNC
8677 || val == C_THUMBSTATFUNC
8678 || val == C_THUMBLABEL);
8679 }
8680
8681 /* arm_coff_make_msymbol_special()
8682 arm_elf_make_msymbol_special()
8683
8684 These functions test whether the COFF or ELF symbol corresponds to
8685 an address in thumb code, and set a "special" bit in a minimal
8686 symbol to indicate that it does. */
8687
8688 static void
8689 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8690 {
8691 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8692
8693 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8694 == ST_BRANCH_TO_THUMB)
8695 MSYMBOL_SET_SPECIAL (msym);
8696 }
8697
8698 static void
8699 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8700 {
8701 if (coff_sym_is_thumb (val))
8702 MSYMBOL_SET_SPECIAL (msym);
8703 }
8704
8705 static void
8706 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8707 asymbol *sym)
8708 {
8709 const char *name = bfd_asymbol_name (sym);
8710 struct arm_per_bfd *data;
8711 struct arm_mapping_symbol new_map_sym;
8712
8713 gdb_assert (name[0] == '$');
8714 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8715 return;
8716
8717 data = arm_bfd_data_key.get (objfile->obfd);
8718 if (data == NULL)
8719 data = arm_bfd_data_key.emplace (objfile->obfd,
8720 objfile->obfd->section_count);
8721 arm_mapping_symbol_vec &map
8722 = data->section_maps[bfd_asymbol_section (sym)->index];
8723
8724 new_map_sym.value = sym->value;
8725 new_map_sym.type = name[1];
8726
8727 /* Insert at the end, the vector will be sorted on first use. */
8728 map.push_back (new_map_sym);
8729 }
8730
8731 static void
8732 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8733 {
8734 struct gdbarch *gdbarch = regcache->arch ();
8735 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8736
8737 /* If necessary, set the T bit. */
8738 if (arm_apcs_32)
8739 {
8740 ULONGEST val, t_bit;
8741 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8742 t_bit = arm_psr_thumb_bit (gdbarch);
8743 if (arm_pc_is_thumb (gdbarch, pc))
8744 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8745 val | t_bit);
8746 else
8747 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8748 val & ~t_bit);
8749 }
8750 }
8751
8752 /* Read the contents of a NEON quad register, by reading from two
8753 double registers. This is used to implement the quad pseudo
8754 registers, and for argument passing in case the quad registers are
8755 missing; vectors are passed in quad registers when using the VFP
8756 ABI, even if a NEON unit is not present. REGNUM is the index of
8757 the quad register, in [0, 15]. */
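/* For example (assuming a little-endian target): reading q1 reads d2 into
   bytes 0-7 of BUF and d3 into bytes 8-15; on a big-endian target the two
   halves land the other way round, since d2 is always the least
   significant half of q1.  */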
8758
8759 static enum register_status
8760 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8761 int regnum, gdb_byte *buf)
8762 {
8763 char name_buf[4];
8764 gdb_byte reg_buf[8];
8765 int offset, double_regnum;
8766 enum register_status status;
8767
8768 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8769 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8770 strlen (name_buf));
8771
8772 /* d0 is always the least significant half of q0. */
8773 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8774 offset = 8;
8775 else
8776 offset = 0;
8777
8778 status = regcache->raw_read (double_regnum, reg_buf);
8779 if (status != REG_VALID)
8780 return status;
8781 memcpy (buf + offset, reg_buf, 8);
8782
8783 offset = 8 - offset;
8784 status = regcache->raw_read (double_regnum + 1, reg_buf);
8785 if (status != REG_VALID)
8786 return status;
8787 memcpy (buf + offset, reg_buf, 8);
8788
8789 return REG_VALID;
8790 }
8791
8792 /* Read the contents of the MVE pseudo register REGNUM and store it
8793 in BUF. */
8794
8795 static enum register_status
8796 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8797 int regnum, gdb_byte *buf)
8798 {
8799 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8800
8801 /* P0 is the first 16 bits of VPR. */
8802 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
8803 }
8804
8805 static enum register_status
8806 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8807 int regnum, gdb_byte *buf)
8808 {
8809 const int num_regs = gdbarch_num_regs (gdbarch);
8810 char name_buf[4];
8811 gdb_byte reg_buf[8];
8812 int offset, double_regnum;
8813 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8814
8815 gdb_assert (regnum >= num_regs);
8816
8817 if (is_q_pseudo (gdbarch, regnum))
8818 {
8819 /* Quad-precision register. */
8820 return arm_neon_quad_read (gdbarch, regcache,
8821 regnum - tdep->q_pseudo_base, buf);
8822 }
8823 else if (is_mve_pseudo (gdbarch, regnum))
8824 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
8825 else
8826 {
8827 enum register_status status;
8828
8829 regnum -= tdep->s_pseudo_base;
8830 /* Single-precision register. */
8831 gdb_assert (regnum < 32);
8832
8833 /* s0 is always the least significant half of d0. */
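      /* For example (hypothetical register): s5 lives in bytes 4-7 of d2
	 on a little-endian target and in bytes 0-3 on a big-endian one.  */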
8834 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8835 offset = (regnum & 1) ? 0 : 4;
8836 else
8837 offset = (regnum & 1) ? 4 : 0;
8838
8839 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8840 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8841 strlen (name_buf));
8842
8843 status = regcache->raw_read (double_regnum, reg_buf);
8844 if (status == REG_VALID)
8845 memcpy (buf, reg_buf + offset, 4);
8846 return status;
8847 }
8848 }
8849
8850 /* Store the contents of BUF to a NEON quad register, by writing to
8851 two double registers. This is used to implement the quad pseudo
8852 registers, and for argument passing in case the quad registers are
8853 missing; vectors are passed in quad registers when using the VFP
8854 ABI, even if a NEON unit is not present. REGNUM is the index
8855 of the quad register, in [0, 15]. */
8856
8857 static void
8858 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8859 int regnum, const gdb_byte *buf)
8860 {
8861 char name_buf[4];
8862 int offset, double_regnum;
8863
8864 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8865 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8866 strlen (name_buf));
8867
8868 /* d0 is always the least significant half of q0. */
8869 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8870 offset = 8;
8871 else
8872 offset = 0;
8873
8874 regcache->raw_write (double_regnum, buf + offset);
8875 offset = 8 - offset;
8876 regcache->raw_write (double_regnum + 1, buf + offset);
8877 }
8878
8879 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
8880
8881 static void
8882 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8883 int regnum, const gdb_byte *buf)
8884 {
8885 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8886
8887 /* P0 is the first 16 bits of VPR. */
8888 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
8889 }
8890
8891 static void
8892 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8893 int regnum, const gdb_byte *buf)
8894 {
8895 const int num_regs = gdbarch_num_regs (gdbarch);
8896 char name_buf[4];
8897 gdb_byte reg_buf[8];
8898 int offset, double_regnum;
8899 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8900
8901 gdb_assert (regnum >= num_regs);
8902
8903 if (is_q_pseudo (gdbarch, regnum))
8904 {
8905 /* Quad-precision register. */
8906 arm_neon_quad_write (gdbarch, regcache,
8907 regnum - tdep->q_pseudo_base, buf);
8908 }
8909 else if (is_mve_pseudo (gdbarch, regnum))
8910 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
8911 else
8912 {
8913 regnum -= tdep->s_pseudo_base;
8914 /* Single-precision register. */
8915 gdb_assert (regnum < 32);
8916
8917 /* s0 is always the least significant half of d0. */
8918 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8919 offset = (regnum & 1) ? 0 : 4;
8920 else
8921 offset = (regnum & 1) ? 4 : 0;
8922
8923 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8924 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8925 strlen (name_buf));
8926
8927 regcache->raw_read (double_regnum, reg_buf);
8928 memcpy (reg_buf + offset, buf, 4);
8929 regcache->raw_write (double_regnum, reg_buf);
8930 }
8931 }
8932
8933 static struct value *
8934 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8935 {
8936 const int *reg_p = (const int *) baton;
8937 return value_of_register (*reg_p, frame);
8938 }
8939 \f
8940 static enum gdb_osabi
8941 arm_elf_osabi_sniffer (bfd *abfd)
8942 {
8943 unsigned int elfosabi;
8944 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8945
8946 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8947
8948 if (elfosabi == ELFOSABI_ARM)
8949 /* GNU tools use this value. Check note sections in this case,
8950 as well. */
8951 {
8952 for (asection *sect : gdb_bfd_sections (abfd))
8953 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8954 }
8955
8956 /* Anything else will be handled by the generic ELF sniffer. */
8957 return osabi;
8958 }
8959
8960 static int
8961 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8962 struct reggroup *group)
8963 {
8964 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8965 this, FPS register belongs to save_regroup, restore_reggroup, and
8966 all_reggroup, of course. */
8967 if (regnum == ARM_FPS_REGNUM)
8968 return (group == float_reggroup
8969 || group == save_reggroup
8970 || group == restore_reggroup
8971 || group == all_reggroup);
8972 else
8973 return default_register_reggroup_p (gdbarch, regnum, group);
8974 }
8975
8976 /* For backward-compatibility we allow two 'g' packet lengths with
8977 the remote protocol depending on whether FPA registers are
8978 supplied. M-profile targets do not have FPA registers, but some
8979 stubs already exist in the wild which use a 'g' packet which
8980 supplies them albeit with dummy values. The packet format which
8981 includes FPA registers should be considered deprecated for
8982 M-profile targets. */
8983
8984 static void
8985 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8986 {
8987 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8988
8989 if (tdep->is_m)
8990 {
8991 const target_desc *tdesc;
8992
8993 /* If we know from the executable this is an M-profile target,
8994 cater for remote targets whose register set layout is the
8995 same as the FPA layout. */
8996 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8997 register_remote_g_packet_guess (gdbarch,
8998 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8999 tdesc);
9000
9001 /* The regular M-profile layout. */
9002 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9003 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9004 tdesc);
9005
9006 /* M-profile plus M4F VFP. */
9007 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9008 register_remote_g_packet_guess (gdbarch,
9009 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9010 tdesc);
9011 /* M-profile plus MVE. */
9012 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9013 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9014 + ARM_VFP2_REGS_SIZE
9015 + ARM_INT_REGISTER_SIZE, tdesc);
9016 }
9017
9018 /* Otherwise we don't have a useful guess. */
9019 }
9020
9021 /* Implement the code_of_frame_writable gdbarch method. */
9022
9023 static int
9024 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
9025 {
9026 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9027
9028 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9029 {
9030 /* M-profile exception frames return to some magic PCs, which
9031 aren't writable at all. */
9032 return 0;
9033 }
9034 else
9035 return 1;
9036 }
9037
9038 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is "arm", allow it
9039 to be followed by a version (e.g. armv7hl). */
9040
9041 static const char *
9042 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9043 {
9044 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9045 return "arm(v[^- ]*)?";
9046 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9047 }
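/* For instance, the regexp above matches the architecture component of
   triplets such as "arm-none-eabi" or "armv7hl-redhat-linux-gnueabi";
   any other BFD architecture name is returned unchanged.  */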
9048
9049 /* Initialize the current architecture based on INFO. If possible,
9050 re-use an architecture from ARCHES, which is a list of
9051 architectures already created during this debugging session.
9052
9053 Called e.g. at program startup, when reading a core file, and when
9054 reading a binary file. */
9055
9056 static struct gdbarch *
9057 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9058 {
9059 struct gdbarch *gdbarch;
9060 struct gdbarch_list *best_arch;
9061 enum arm_abi_kind arm_abi = arm_abi_global;
9062 enum arm_float_model fp_model = arm_fp_model;
9063 tdesc_arch_data_up tdesc_data;
9064 int i;
9065 bool is_m = false;
9066 int vfp_register_count = 0;
9067 bool have_s_pseudos = false, have_q_pseudos = false;
9068 bool have_wmmx_registers = false;
9069 bool have_neon = false;
9070 bool have_fpa_registers = true;
9071 const struct target_desc *tdesc = info.target_desc;
9072 bool have_vfp = false;
9073 bool have_mve = false;
9074 int mve_vpr_regnum = -1;
9075 int register_count = ARM_NUM_REGS;
9076
9077 /* If we have an object to base this architecture on, try to determine
9078 its ABI. */
9079
9080 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9081 {
9082 int ei_osabi, e_flags;
9083
9084 switch (bfd_get_flavour (info.abfd))
9085 {
9086 case bfd_target_coff_flavour:
9087 /* Assume it's an old APCS-style ABI. */
9088 /* XXX WinCE? */
9089 arm_abi = ARM_ABI_APCS;
9090 break;
9091
9092 case bfd_target_elf_flavour:
9093 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9094 e_flags = elf_elfheader (info.abfd)->e_flags;
9095
9096 if (ei_osabi == ELFOSABI_ARM)
9097 {
9098 /* GNU tools used to use this value, but do not for EABI
9099 objects. There's nowhere to tag an EABI version
9100 anyway, so assume APCS. */
9101 arm_abi = ARM_ABI_APCS;
9102 }
9103 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9104 {
9105 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9106
9107 switch (eabi_ver)
9108 {
9109 case EF_ARM_EABI_UNKNOWN:
9110 /* Assume GNU tools. */
9111 arm_abi = ARM_ABI_APCS;
9112 break;
9113
9114 case EF_ARM_EABI_VER4:
9115 case EF_ARM_EABI_VER5:
9116 arm_abi = ARM_ABI_AAPCS;
9117 /* EABI binaries default to VFP float ordering.
9118 They may also contain build attributes that can
9119 be used to identify if the VFP argument-passing
9120 ABI is in use. */
9121 if (fp_model == ARM_FLOAT_AUTO)
9122 {
9123 #ifdef HAVE_ELF
9124 switch (bfd_elf_get_obj_attr_int (info.abfd,
9125 OBJ_ATTR_PROC,
9126 Tag_ABI_VFP_args))
9127 {
9128 case AEABI_VFP_args_base:
9129 /* "The user intended FP parameter/result
9130 passing to conform to AAPCS, base
9131 variant". */
9132 fp_model = ARM_FLOAT_SOFT_VFP;
9133 break;
9134 case AEABI_VFP_args_vfp:
9135 /* "The user intended FP parameter/result
9136 passing to conform to AAPCS, VFP
9137 variant". */
9138 fp_model = ARM_FLOAT_VFP;
9139 break;
9140 case AEABI_VFP_args_toolchain:
9141 /* "The user intended FP parameter/result
9142 passing to conform to tool chain-specific
9143 conventions" - we don't know any such
9144 conventions, so leave it as "auto". */
9145 break;
9146 case AEABI_VFP_args_compatible:
9147 /* "Code is compatible with both the base
9148 and VFP variants; the user did not permit
9149 non-variadic functions to pass FP
9150 parameters/results" - leave it as
9151 "auto". */
9152 break;
9153 default:
9154 /* Attribute value not mentioned in the
9155 November 2012 ABI, so leave it as
9156 "auto". */
9157 break;
9158 }
9159 #else
9160 fp_model = ARM_FLOAT_SOFT_VFP;
9161 #endif
9162 }
9163 break;
9164
9165 default:
9166 /* Leave it as "auto". */
9167 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9168 break;
9169 }
9170
9171 #ifdef HAVE_ELF
9172 /* Detect M-profile programs. This only works if the
9173 executable file includes build attributes; GCC does
9174 copy them to the executable, but e.g. RealView does
9175 not. */
9176 int attr_arch
9177 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9178 Tag_CPU_arch);
9179 int attr_profile
9180 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9181 Tag_CPU_arch_profile);
9182
9183 /* GCC specifies the profile for v6-M; RealView only
9184 specifies the profile for architectures starting with
9185 V7 (as opposed to architectures with a tag
9186 numerically greater than TAG_CPU_ARCH_V7). */
9187 if (!tdesc_has_registers (tdesc)
9188 && (attr_arch == TAG_CPU_ARCH_V6_M
9189 || attr_arch == TAG_CPU_ARCH_V6S_M
9190 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
9191 || attr_profile == 'M'))
9192 is_m = true;
9193 #endif
9194 }
9195
9196 if (fp_model == ARM_FLOAT_AUTO)
9197 {
9198 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9199 {
9200 case 0:
9201 /* Leave it as "auto". Strictly speaking this case
9202 means FPA, but almost nobody uses that now, and
9203 many toolchains fail to set the appropriate bits
9204 for the floating-point model they use. */
9205 break;
9206 case EF_ARM_SOFT_FLOAT:
9207 fp_model = ARM_FLOAT_SOFT_FPA;
9208 break;
9209 case EF_ARM_VFP_FLOAT:
9210 fp_model = ARM_FLOAT_VFP;
9211 break;
9212 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9213 fp_model = ARM_FLOAT_SOFT_VFP;
9214 break;
9215 }
9216 }
9217
9218 if (e_flags & EF_ARM_BE8)
9219 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9220
9221 break;
9222
9223 default:
9224 /* Leave it as "auto". */
9225 break;
9226 }
9227 }
9228
9229 /* Check any target description for validity. */
9230 if (tdesc_has_registers (tdesc))
9231 {
9232 /* For most registers we require GDB's default names; but also allow
9233 the numeric names for sp / lr / pc, as a convenience. */
9234 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9235 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9236 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9237
9238 const struct tdesc_feature *feature;
9239 int valid_p;
9240
9241 feature = tdesc_find_feature (tdesc,
9242 "org.gnu.gdb.arm.core");
9243 if (feature == NULL)
9244 {
9245 feature = tdesc_find_feature (tdesc,
9246 "org.gnu.gdb.arm.m-profile");
9247 if (feature == NULL)
9248 return NULL;
9249 else
9250 is_m = true;
9251 }
9252
9253 tdesc_data = tdesc_data_alloc ();
9254
9255 valid_p = 1;
9256 for (i = 0; i < ARM_SP_REGNUM; i++)
9257 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9258 arm_register_names[i]);
9259 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9260 ARM_SP_REGNUM,
9261 arm_sp_names);
9262 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9263 ARM_LR_REGNUM,
9264 arm_lr_names);
9265 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9266 ARM_PC_REGNUM,
9267 arm_pc_names);
9268 if (is_m)
9269 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9270 ARM_PS_REGNUM, "xpsr");
9271 else
9272 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9273 ARM_PS_REGNUM, "cpsr");
9274
9275 if (!valid_p)
9276 return NULL;
9277
9278 feature = tdesc_find_feature (tdesc,
9279 "org.gnu.gdb.arm.fpa");
9280 if (feature != NULL)
9281 {
9282 valid_p = 1;
9283 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9284 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9285 arm_register_names[i]);
9286 if (!valid_p)
9287 return NULL;
9288 }
9289 else
9290 have_fpa_registers = false;
9291
9292 feature = tdesc_find_feature (tdesc,
9293 "org.gnu.gdb.xscale.iwmmxt");
9294 if (feature != NULL)
9295 {
9296 static const char *const iwmmxt_names[] = {
9297 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9298 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9299 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9300 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9301 };
9302
9303 valid_p = 1;
9304 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9305 valid_p
9306 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9307 iwmmxt_names[i - ARM_WR0_REGNUM]);
9308
9309 /* Check for the control registers, but do not fail if they
9310 are missing. */
9311 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9312 tdesc_numbered_register (feature, tdesc_data.get (), i,
9313 iwmmxt_names[i - ARM_WR0_REGNUM]);
9314
9315 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9316 valid_p
9317 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9318 iwmmxt_names[i - ARM_WR0_REGNUM]);
9319
9320 if (!valid_p)
9321 return NULL;
9322
9323 have_wmmx_registers = true;
9324 }
9325
9326 /* If we have a VFP unit, check whether the single precision registers
9327 are present. If not, then we will synthesize them as pseudo
9328 registers. */
9329 feature = tdesc_find_feature (tdesc,
9330 "org.gnu.gdb.arm.vfp");
9331 if (feature != NULL)
9332 {
9333 static const char *const vfp_double_names[] = {
9334 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9335 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9336 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9337 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9338 };
9339
9340 /* Require the double precision registers. There must be either
9341 16 or 32. */
9342 valid_p = 1;
9343 for (i = 0; i < 32; i++)
9344 {
9345 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9346 ARM_D0_REGNUM + i,
9347 vfp_double_names[i]);
9348 if (!valid_p)
9349 break;
9350 }
9351 if (!valid_p && i == 16)
9352 valid_p = 1;
9353
9354 /* Also require FPSCR. */
9355 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9356 ARM_FPSCR_REGNUM, "fpscr");
9357 if (!valid_p)
9358 return NULL;
9359
9360 have_vfp = true;
9361
9362 if (tdesc_unnumbered_register (feature, "s0") == 0)
9363 have_s_pseudos = true;
9364
9365 vfp_register_count = i;
9366
9367 /* If we have VFP, also check for NEON. The architecture allows
9368 NEON without VFP (integer vector operations only), but GDB
9369 does not support that. */
9370 feature = tdesc_find_feature (tdesc,
9371 "org.gnu.gdb.arm.neon");
9372 if (feature != NULL)
9373 {
9374 /* NEON requires 32 double-precision registers. */
9375 if (i != 32)
9376 return NULL;
9377
9378 /* If there are quad registers defined by the stub, use
9379 their type; otherwise (normally) provide them with
9380 the default type. */
9381 if (tdesc_unnumbered_register (feature, "q0") == 0)
9382 have_q_pseudos = true;
9383 }
9384 }
9385
9386 /* Check for MVE after all the checks for GPRs, VFP and NEON.
9387 MVE (Helium) is an M-profile extension. */
9388 if (is_m)
9389 {
9390 /* Do we have the MVE feature? */
9391 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.m-profile-mve");
9392
9393 if (feature != nullptr)
9394 {
9395 /* If we have MVE, we must always have the VPR register. */
9396 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9397 register_count, "vpr");
9398 if (!valid_p)
9399 {
9400 warning (_("MVE feature is missing required register vpr."));
9401 return nullptr;
9402 }
9403
9404 have_mve = true;
9405 mve_vpr_regnum = register_count;
9406 register_count++;
9407
9408 /* We can't have Q pseudo registers available here, as that
9409 would mean we have NEON features, and that is only available
9410 on A and R profiles. */
9411 gdb_assert (!have_q_pseudos);
9412
9413 /* Given we have an M-profile target description, if MVE is
9414 enabled and there are VFP registers, we should have Q
9415 pseudo registers (Q0 ~ Q7). */
9416 if (have_vfp)
9417 have_q_pseudos = true;
9418 }
9419 }
9420 }
9421
9422 /* If there is already a candidate, use it. */
9423 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9424 best_arch != NULL;
9425 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9426 {
9427 arm_gdbarch_tdep *tdep
9428 = (arm_gdbarch_tdep *) gdbarch_tdep (best_arch->gdbarch);
9429
9430 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
9431 continue;
9432
9433 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
9434 continue;
9435
9436 /* There are various other properties in tdep that we do not
9437 need to check here: those derived from a target description,
9438 since gdbarches with a different target description are
9439 automatically disqualified. */
9440
9441 /* Do check is_m, though, since it might come from the binary. */
9442 if (is_m != tdep->is_m)
9443 continue;
9444
9445 /* Found a match. */
9446 break;
9447 }
9448
9449 if (best_arch != NULL)
9450 return best_arch->gdbarch;
9451
9452 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
9453 gdbarch = gdbarch_alloc (&info, tdep);
9454
9455 /* Record additional information about the architecture we are defining.
9456 These are gdbarch discriminators, like the OSABI. */
9457 tdep->arm_abi = arm_abi;
9458 tdep->fp_model = fp_model;
9459 tdep->is_m = is_m;
9460 tdep->have_fpa_registers = have_fpa_registers;
9461 tdep->have_wmmx_registers = have_wmmx_registers;
9462 gdb_assert (vfp_register_count == 0
9463 || vfp_register_count == 16
9464 || vfp_register_count == 32);
9465 tdep->vfp_register_count = vfp_register_count;
9466 tdep->have_s_pseudos = have_s_pseudos;
9467 tdep->have_q_pseudos = have_q_pseudos;
9468 tdep->have_neon = have_neon;
9469
9470 /* Adjust the MVE feature settings. */
9471 if (have_mve)
9472 {
9473 tdep->have_mve = true;
9474 tdep->mve_vpr_regnum = mve_vpr_regnum;
9475 }
9476
9477 arm_register_g_packet_guesses (gdbarch);
9478
9479 /* Breakpoints. */
9480 switch (info.byte_order_for_code)
9481 {
9482 case BFD_ENDIAN_BIG:
9483 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9484 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9485 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9486 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9487
9488 break;
9489
9490 case BFD_ENDIAN_LITTLE:
9491 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9492 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9493 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9494 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9495
9496 break;
9497
9498 default:
9499 internal_error (__FILE__, __LINE__,
9500 _("arm_gdbarch_init: bad byte order for float format"));
9501 }
9502
9503 /* On ARM targets char defaults to unsigned. */
9504 set_gdbarch_char_signed (gdbarch, 0);
9505
9506 /* wchar_t is unsigned under the AAPCS. */
9507 if (tdep->arm_abi == ARM_ABI_AAPCS)
9508 set_gdbarch_wchar_signed (gdbarch, 0);
9509 else
9510 set_gdbarch_wchar_signed (gdbarch, 1);
9511
9512 /* Compute type alignment. */
9513 set_gdbarch_type_align (gdbarch, arm_type_align);
9514
9515 /* Note: for displaced stepping, this includes the breakpoint, and one word
9516 of additional scratch space. This setting isn't used for anything beside
9517 displaced stepping at present. */
9518 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9519
9520 /* This should be low enough for everything. */
9521 tdep->lowest_pc = 0x20;
9522 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9523
9524 /* The default, for both APCS and AAPCS, is to return small
9525 structures in registers. */
9526 tdep->struct_return = reg_struct_return;
9527
9528 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9529 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9530
9531 if (is_m)
9532 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9533
9534 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9535
9536 frame_base_set_default (gdbarch, &arm_normal_base);
9537
9538 /* Address manipulation. */
9539 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9540
9541 /* Advance PC across function entry code. */
9542 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9543
9544 /* Detect whether PC is at a point where the stack has been destroyed. */
9545 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9546
9547 /* Skip trampolines. */
9548 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9549
9550 /* The stack grows downward. */
9551 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9552
9553 /* Breakpoint manipulation. */
9554 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9555 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9556 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9557 arm_breakpoint_kind_from_current_state);
9558
9559 /* Information about registers, etc. */
9560 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9561 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9562 set_gdbarch_num_regs (gdbarch, register_count);
9563 set_gdbarch_register_type (gdbarch, arm_register_type);
9564 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9565
9566 /* This "info float" is FPA-specific. Use the generic version if we
9567 do not have FPA. */
9568 if (tdep->have_fpa_registers)
9569 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9570
9571 /* Internal <-> external register number maps. */
9572 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9573 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9574
9575 set_gdbarch_register_name (gdbarch, arm_register_name);
9576
9577 /* Returning results. */
9578 set_gdbarch_return_value (gdbarch, arm_return_value);
9579
9580 /* Disassembly. */
9581 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9582
9583 /* Minsymbol frobbing. */
9584 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9585 set_gdbarch_coff_make_msymbol_special (gdbarch,
9586 arm_coff_make_msymbol_special);
9587 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9588
9589 /* Thumb-2 IT block support. */
9590 set_gdbarch_adjust_breakpoint_address (gdbarch,
9591 arm_adjust_breakpoint_address);
9592
9593 /* Virtual tables. */
9594 set_gdbarch_vbit_in_delta (gdbarch, 1);
9595
9596 /* Hook in the ABI-specific overrides, if they have been registered. */
9597 gdbarch_init_osabi (info, gdbarch);
9598
9599 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9600
9601 /* Add some default predicates. */
9602 if (is_m)
9603 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9604 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9605 dwarf2_append_unwinders (gdbarch);
9606 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9607 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9608 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9609
9610 /* Now we have tuned the configuration, set a few final things,
9611 based on what the OS ABI has told us. */
9612
9613 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9614 binaries are always marked. */
9615 if (tdep->arm_abi == ARM_ABI_AUTO)
9616 tdep->arm_abi = ARM_ABI_APCS;
9617
9618 /* Watchpoints are not steppable. */
9619 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9620
9621 /* We used to default to FPA for generic ARM, but almost nobody
9622 uses that now, and we now provide a way for the user to force
9623 the model. So default to the most useful variant. */
9624 if (tdep->fp_model == ARM_FLOAT_AUTO)
9625 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9626
9627 if (tdep->jb_pc >= 0)
9628 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9629
9630 /* Floating point sizes and format. */
9631 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9632 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9633 {
9634 set_gdbarch_double_format
9635 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9636 set_gdbarch_long_double_format
9637 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9638 }
9639 else
9640 {
9641 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9642 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9643 }
9644
9645 if (tdesc_data != nullptr)
9646 {
9647 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9648
9649 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9650 register_count = gdbarch_num_regs (gdbarch);
9651
9652 /* Override tdesc_register_type to adjust the types of VFP
9653 registers for NEON. */
9654 set_gdbarch_register_type (gdbarch, arm_register_type);
9655 }
9656
9657 /* Initialize the pseudo register data. */
9658 int num_pseudos = 0;
9659 if (tdep->have_s_pseudos)
9660 {
9661 /* VFP single precision pseudo registers (S0~S31). */
9662 tdep->s_pseudo_base = register_count;
9663 tdep->s_pseudo_count = 32;
9664 num_pseudos += tdep->s_pseudo_count;
9665
9666 if (tdep->have_q_pseudos)
9667 {
9668 /* NEON quad precision pseudo registers (Q0~Q15). */
9669 tdep->q_pseudo_base = register_count + num_pseudos;
9670
9671 if (have_neon)
9672 tdep->q_pseudo_count = 16;
9673 else if (have_mve)
9674 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
9675
9676 num_pseudos += tdep->q_pseudo_count;
9677 }
9678 }
9679
9680 /* Do we have any MVE pseudo registers? */
9681 if (have_mve)
9682 {
9683 tdep->mve_pseudo_base = register_count + num_pseudos;
9684 tdep->mve_pseudo_count = 1;
9685 num_pseudos += tdep->mve_pseudo_count;
9686 }
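/* The resulting register numbering is therefore: raw registers in
   [0, register_count), then (if present) S0-S31 starting at s_pseudo_base,
   then the Q pseudo registers (Q0-Q15 with NEON, Q0-Q7 with MVE) at
   q_pseudo_base, and finally the MVE pseudo register P0 at
   mve_pseudo_base.  */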
9687
9688 /* Set some pseudo register hooks, if we have pseudo registers. */
9689 if (tdep->have_s_pseudos || have_mve)
9690 {
9691 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9692 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9693 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9694 }
9695
9696 /* Add standard register aliases. We add aliases even for those
9697 names which are used by the current architecture - it's simpler,
9698 and does no harm, since nothing ever lists user registers. */
9699 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9700 user_reg_add (gdbarch, arm_register_aliases[i].name,
9701 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9702
9703 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9704 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9705
9706 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9707
9708 return gdbarch;
9709 }
9710
9711 static void
9712 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9713 {
9714 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9715
9716 if (tdep == NULL)
9717 return;
9718
9719 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9720 (int) tdep->fp_model);
9721 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9722 (int) tdep->have_fpa_registers);
9723 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9724 (int) tdep->have_wmmx_registers);
9725 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9726 (int) tdep->vfp_register_count);
9727 fprintf_unfiltered (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
9728 tdep->have_s_pseudos ? "true" : "false");
9729 fprintf_unfiltered (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
9730 (int) tdep->s_pseudo_base);
9731 fprintf_unfiltered (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
9732 (int) tdep->s_pseudo_count);
9733 fprintf_unfiltered (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
9734 tdep->have_q_pseudos ? "true" : "false");
9735 fprintf_unfiltered (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
9736 (int) tdep->q_pseudo_base);
9737 fprintf_unfiltered (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
9738 (int) tdep->q_pseudo_count);
9739 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9740 (int) tdep->have_neon);
9741 fprintf_unfiltered (file, _("arm_dump_tdep: have_mve = %s\n"),
9742 tdep->have_mve ? "yes" : "no");
9743 fprintf_unfiltered (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
9744 tdep->mve_vpr_regnum);
9745 fprintf_unfiltered (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
9746 tdep->mve_pseudo_base);
9747 fprintf_unfiltered (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
9748 tdep->mve_pseudo_count);
9749 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9750 (unsigned long) tdep->lowest_pc);
9751 }
9752
9753 #if GDB_SELF_TEST
9754 namespace selftests
9755 {
9756 static void arm_record_test (void);
9757 static void arm_analyze_prologue_test ();
9758 }
9759 #endif
9760
9761 void _initialize_arm_tdep ();
9762 void
9763 _initialize_arm_tdep ()
9764 {
9765 long length;
9766 int i, j;
9767 char regdesc[1024], *rdptr = regdesc;
9768 size_t rest = sizeof (regdesc);
9769
9770 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9771
9772 /* Add ourselves to objfile event chain. */
9773 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
9774
9775 /* Register an ELF OS ABI sniffer for ARM binaries. */
9776 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9777 bfd_target_elf_flavour,
9778 arm_elf_osabi_sniffer);
9779
9780 /* Add root prefix command for all "set arm"/"show arm" commands. */
9781 add_setshow_prefix_cmd ("arm", no_class,
9782 _("Various ARM-specific commands."),
9783 _("Various ARM-specific commands."),
9784 &setarmcmdlist, &showarmcmdlist,
9785 &setlist, &showlist);
9786
9787 arm_disassembler_options = xstrdup ("reg-names-std");
9788 const disasm_options_t *disasm_options
9789 = &disassembler_options_arm ()->options;
9790 int num_disassembly_styles = 0;
9791 for (i = 0; disasm_options->name[i] != NULL; i++)
9792 if (startswith (disasm_options->name[i], "reg-names-"))
9793 num_disassembly_styles++;
9794
9795 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9796 valid_disassembly_styles = XNEWVEC (const char *,
9797 num_disassembly_styles + 1);
9798 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9799 if (startswith (disasm_options->name[i], "reg-names-"))
9800 {
9801 size_t offset = strlen ("reg-names-");
9802 const char *style = disasm_options->name[i];
9803 valid_disassembly_styles[j++] = &style[offset];
9804 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9805 disasm_options->description[i]);
9806 rdptr += length;
9807 rest -= length;
9808 }
9809 /* Mark the end of valid options. */
9810 valid_disassembly_styles[num_disassembly_styles] = NULL;
9811
9812 /* Create the help text. */
9813 std::string helptext = string_printf ("%s%s%s",
9814 _("The valid values are:\n"),
9815 regdesc,
9816 _("The default is \"std\"."));
9817
9818 add_setshow_enum_cmd ("disassembler", no_class,
9819 valid_disassembly_styles, &disassembly_style,
9820 _("Set the disassembly style."),
9821 _("Show the disassembly style."),
9822 helptext.c_str (),
9823 set_disassembly_style_sfunc,
9824 show_disassembly_style_sfunc,
9825 &setarmcmdlist, &showarmcmdlist);
9826
9827 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9828 _("Set usage of ARM 32-bit mode."),
9829 _("Show usage of ARM 32-bit mode."),
9830 _("When off, a 26-bit PC will be used."),
9831 NULL,
9832 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9833 mode is %s. */
9834 &setarmcmdlist, &showarmcmdlist);
9835
9836 /* Add a command to allow the user to force the FPU model. */
9837 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9838 _("Set the floating point type."),
9839 _("Show the floating point type."),
9840 _("auto - Determine the FP typefrom the OS-ABI.\n\
9841 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9842 fpa - FPA co-processor (GCC compiled).\n\
9843 softvfp - Software FP with pure-endian doubles.\n\
9844 vfp - VFP co-processor."),
9845 set_fp_model_sfunc, show_fp_model,
9846 &setarmcmdlist, &showarmcmdlist);
9847
9848 /* Add a command to allow the user to force the ABI. */
9849 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9850 _("Set the ABI."),
9851 _("Show the ABI."),
9852 NULL, arm_set_abi, arm_show_abi,
9853 &setarmcmdlist, &showarmcmdlist);
9854
9855 /* Add two commands to allow the user to force the assumed
9856 execution mode. */
9857 add_setshow_enum_cmd ("fallback-mode", class_support,
9858 arm_mode_strings, &arm_fallback_mode_string,
9859 _("Set the mode assumed when symbols are unavailable."),
9860 _("Show the mode assumed when symbols are unavailable."),
9861 NULL, NULL, arm_show_fallback_mode,
9862 &setarmcmdlist, &showarmcmdlist);
9863 add_setshow_enum_cmd ("force-mode", class_support,
9864 arm_mode_strings, &arm_force_mode_string,
9865 _("Set the mode assumed even when symbols are available."),
9866 _("Show the mode assumed even when symbols are available."),
9867 NULL, NULL, arm_show_force_mode,
9868 &setarmcmdlist, &showarmcmdlist);
9869
9870 /* Debugging flag. */
9871 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9872 _("Set ARM debugging."),
9873 _("Show ARM debugging."),
9874 _("When on, arm-specific debugging is enabled."),
9875 NULL,
9876 NULL, /* FIXME: i18n: "ARM debugging is %s". */
9877 &setdebuglist, &showdebuglist);
9878
9879 #if GDB_SELF_TEST
9880 selftests::register_test ("arm-record", selftests::arm_record_test);
9881 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
9882 #endif
9883
9884 }
9885
9886 /* ARM-reversible process record data structures. */
9887
9888 #define ARM_INSN_SIZE_BYTES 4
9889 #define THUMB_INSN_SIZE_BYTES 2
9890 #define THUMB2_INSN_SIZE_BYTES 4
9891
9892
9893 /* Position of the bit within a 32-bit ARM instruction
9894 that defines whether the instruction is a load or store. */
9895 #define INSN_S_L_BIT_NUM 20
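/* For example, in the single data transfer encodings bit 20 set means a
   load (LDR) and bit 20 clear means a store (STR).  */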
9896
9897 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9898 do \
9899 { \
9900 unsigned int reg_len = LENGTH; \
9901 if (reg_len) \
9902 { \
9903 REGS = XNEWVEC (uint32_t, reg_len); \
9904 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9905 } \
9906 } \
9907 while (0)
9908
9909 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9910 do \
9911 { \
9912 unsigned int mem_len = LENGTH; \
9913 if (mem_len) \
9914 { \
9915 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9916 memcpy(&MEMS->len, &RECORD_BUF[0], \
9917 sizeof(struct arm_mem_r) * LENGTH); \
9918 } \
9919 } \
9920 while (0)
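/* Typical usage in the record routines below: a decoder fills
   record_buf / record_buf_mem and the corresponding counters, then hands
   them over to the insn record, e.g.:

     record_buf[0] = ARM_PS_REGNUM;
     arm_insn_r->reg_rec_count = 1;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);  */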
9921
9922 /* Evaluates to true if the insn has already been recorded, i.e. at least one register or memory record has been noted for it. */
9923 #define INSN_RECORDED(ARM_RECORD) \
9924 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9925
9926 /* ARM memory record structure. */
9927 struct arm_mem_r
9928 {
9929 uint32_t len; /* Record length. */
9930 uint32_t addr; /* Memory address. */
9931 };
9932
9933 /* ARM instruction record contains opcode of current insn
9934 and execution state (before entry to decode_insn()),
9935 contains list of to-be-modified registers and
9936 memory blocks (on return from decode_insn()). */
9937
9938 typedef struct insn_decode_record_t
9939 {
9940 struct gdbarch *gdbarch;
9941 struct regcache *regcache;
9942 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9943 uint32_t arm_insn; /* Should accommodate thumb. */
9944 uint32_t cond; /* Condition code. */
9945 uint32_t opcode; /* Insn opcode. */
9946 uint32_t decode; /* Insn decode bits. */
9947 uint32_t mem_rec_count; /* No of mem records. */
9948 uint32_t reg_rec_count; /* No of reg records. */
9949 uint32_t *arm_regs; /* Registers to be saved for this record. */
9950 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9951 } insn_decode_record;
9952
9953
9954 /* Checks ARM SBZ and SBO mandatory fields. */
9955
9956 static int
9957 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9958 {
9959 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9960
9961 if (!len)
9962 return 1;
9963
9964 if (!sbo)
9965 ones = ~ones;
9966
9967 while (ones)
9968 {
9969 if (!(ones & sbo))
9970 {
9971 return 0;
9972 }
9973 ones = ones >> 1;
9974 }
9975 return 1;
9976 }
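/* For example, the BLX handling below calls sbo_sbz (insn, 9, 12, 1),
   which checks the twelve "should be one" bits 8..19 of the encoding;
   with SBO == 0 the extracted field is complemented first, so the same
   loop checks "should be zero" bits instead.  */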
9977
9978 enum arm_record_result
9979 {
9980 ARM_RECORD_SUCCESS = 0,
9981 ARM_RECORD_FAILURE = 1
9982 };
9983
9984 typedef enum
9985 {
9986 ARM_RECORD_STRH=1,
9987 ARM_RECORD_STRD
9988 } arm_record_strx_t;
9989
9990 typedef enum
9991 {
9992 ARM_RECORD=1,
9993 THUMB_RECORD,
9994 THUMB2_RECORD
9995 } record_type_t;
9996
9997
9998 static int
9999 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10000 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10001 {
10002
10003 struct regcache *reg_cache = arm_insn_r->regcache;
10004 ULONGEST u_regval[2]= {0};
10005
10006 uint32_t reg_src1 = 0, reg_src2 = 0;
10007 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10008
10009 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10010 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10011
10012 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10013 {
10014 /* 1) Handle misc store, immediate offset. */
10015 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10016 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10017 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10018 regcache_raw_read_unsigned (reg_cache, reg_src1,
10019 &u_regval[0]);
10020 if (ARM_PC_REGNUM == reg_src1)
10021 {
10022 /* If R15 (PC) was used as Rn, the value read is the current PC + 8. */
10023 u_regval[0] = u_regval[0] + 8;
10024 }
10025 offset_8 = (immed_high << 4) | immed_low;
10026 /* Calculate target store address. */
10027 if (14 == arm_insn_r->opcode)
10028 {
10029 tgt_mem_addr = u_regval[0] + offset_8;
10030 }
10031 else
10032 {
10033 tgt_mem_addr = u_regval[0] - offset_8;
10034 }
10035 if (ARM_RECORD_STRH == str_type)
10036 {
10037 record_buf_mem[0] = 2;
10038 record_buf_mem[1] = tgt_mem_addr;
10039 arm_insn_r->mem_rec_count = 1;
10040 }
10041 else if (ARM_RECORD_STRD == str_type)
10042 {
10043 record_buf_mem[0] = 4;
10044 record_buf_mem[1] = tgt_mem_addr;
10045 record_buf_mem[2] = 4;
10046 record_buf_mem[3] = tgt_mem_addr + 4;
10047 arm_insn_r->mem_rec_count = 2;
10048 }
10049 }
10050 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10051 {
10052 /* 2) Store, register offset. */
10053 /* Get Rm. */
10054 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10055 /* Get Rn. */
10056 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10057 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10058 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10059 if (15 == reg_src2)
10060 {
10061 /* If R15 (PC) was used as Rn, the value read (held in u_regval[1] here) is the current PC + 8. */
10062 u_regval[1] = u_regval[1] + 8;
10063 }
10064 /* Calculate target store address, Rn +/- Rm, register offset. */
10065 if (12 == arm_insn_r->opcode)
10066 {
10067 tgt_mem_addr = u_regval[0] + u_regval[1];
10068 }
10069 else
10070 {
10071 tgt_mem_addr = u_regval[1] - u_regval[0];
10072 }
10073 if (ARM_RECORD_STRH == str_type)
10074 {
10075 record_buf_mem[0] = 2;
10076 record_buf_mem[1] = tgt_mem_addr;
10077 arm_insn_r->mem_rec_count = 1;
10078 }
10079 else if (ARM_RECORD_STRD == str_type)
10080 {
10081 record_buf_mem[0] = 4;
10082 record_buf_mem[1] = tgt_mem_addr;
10083 record_buf_mem[2] = 4;
10084 record_buf_mem[3] = tgt_mem_addr + 4;
10085 arm_insn_r->mem_rec_count = 2;
10086 }
10087 }
10088 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10089 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10090 {
10091 /* 3) Store, immediate pre-indexed. */
10092 /* 5) Store, immediate post-indexed. */
10093 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10094 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10095 offset_8 = (immed_high << 4) | immed_low;
10096 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10097 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10098 /* Calculate target store address, Rn +/- immediate offset. */
10099 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10100 {
10101 tgt_mem_addr = u_regval[0] + offset_8;
10102 }
10103 else
10104 {
10105 tgt_mem_addr = u_regval[0] - offset_8;
10106 }
10107 if (ARM_RECORD_STRH == str_type)
10108 {
10109 record_buf_mem[0] = 2;
10110 record_buf_mem[1] = tgt_mem_addr;
10111 arm_insn_r->mem_rec_count = 1;
10112 }
10113 else if (ARM_RECORD_STRD == str_type)
10114 {
10115 record_buf_mem[0] = 4;
10116 record_buf_mem[1] = tgt_mem_addr;
10117 record_buf_mem[2] = 4;
10118 record_buf_mem[3] = tgt_mem_addr + 4;
10119 arm_insn_r->mem_rec_count = 2;
10120 }
10121 /* Record Rn also as it changes. */
10122 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10123 arm_insn_r->reg_rec_count = 1;
10124 }
10125 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10126 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10127 {
10128 /* 4) Store, register pre-indexed. */
10129 /* 6) Store, register post-indexed. */
10130 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10131 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10132 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10133 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10134 /* Calculate target store address, Rn +/- Rm, register offset. */
10135 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10136 {
10137 tgt_mem_addr = u_regval[0] + u_regval[1];
10138 }
10139 else
10140 {
10141 tgt_mem_addr = u_regval[1] - u_regval[0];
10142 }
10143 if (ARM_RECORD_STRH == str_type)
10144 {
10145 record_buf_mem[0] = 2;
10146 record_buf_mem[1] = tgt_mem_addr;
10147 arm_insn_r->mem_rec_count = 1;
10148 }
10149 else if (ARM_RECORD_STRD == str_type)
10150 {
10151 record_buf_mem[0] = 4;
10152 record_buf_mem[1] = tgt_mem_addr;
10153 record_buf_mem[2] = 4;
10154 record_buf_mem[3] = tgt_mem_addr + 4;
10155 arm_insn_r->mem_rec_count = 2;
10156 }
10157 /* Record Rn also as it changes. */
10158 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10159 arm_insn_r->reg_rec_count = 1;
10160 }
10161 return 0;
10162 }
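/* As an illustration of the above: an STRD that stores to address A ends
   up with record_buf_mem == {4, A, 4, A + 4} and mem_rec_count == 2, and
   the pre/post-indexed forms additionally note Rn in record_buf as a
   modified register.  */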
10163
10164 /* Handling ARM extension space insns. */
10165
10166 static int
10167 arm_record_extension_space (insn_decode_record *arm_insn_r)
10168 {
10169 int ret = 0; /* Return value: -1:record failure ; 0:success */
10170 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10171 uint32_t record_buf[8], record_buf_mem[8];
10172 uint32_t reg_src1 = 0;
10173 struct regcache *reg_cache = arm_insn_r->regcache;
10174 ULONGEST u_regval = 0;
10175
10176 gdb_assert (!INSN_RECORDED(arm_insn_r));
10177 /* Handle unconditional insn extension space. */
10178
10179 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10180 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10181 if (arm_insn_r->cond)
10182 {
10183 /* PLD has no effect on the architectural state; it just affects
10184 the caches. */
10185 if (5 == ((opcode1 & 0xE0) >> 5))
10186 {
10187 /* BLX(1) */
10188 record_buf[0] = ARM_PS_REGNUM;
10189 record_buf[1] = ARM_LR_REGNUM;
10190 arm_insn_r->reg_rec_count = 2;
10191 }
10192 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10193 }
10194
10195
10196 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10197 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10198 {
10199 ret = -1;
10200 /* Undefined instruction on ARM V5; need to handle if later
10201 versions define it. */
10202 }
10203
10204 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10205 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10206 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10207
10208 /* Handle arithmetic insn extension space. */
10209 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10210 && !INSN_RECORDED(arm_insn_r))
10211 {
10212 /* Handle MLA(S) and MUL(S). */
10213 if (in_inclusive_range (insn_op1, 0U, 3U))
10214 {
10215 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10216 record_buf[1] = ARM_PS_REGNUM;
10217 arm_insn_r->reg_rec_count = 2;
10218 }
10219 else if (in_inclusive_range (insn_op1, 4U, 15U))
10220 {
10221 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10222 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10223 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10224 record_buf[2] = ARM_PS_REGNUM;
10225 arm_insn_r->reg_rec_count = 3;
10226 }
10227 }
10228
10229 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10230 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10231 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10232
10233 /* Handle control insn extension space. */
10234
10235 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10236 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10237 {
10238 if (!bit (arm_insn_r->arm_insn, 25))
10239 {
10240 if (!bits (arm_insn_r->arm_insn, 4, 7))
10241 {
10242 if ((0 == insn_op1) || (2 == insn_op1))
10243 {
10244 /* MRS. */
10245 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10246 arm_insn_r->reg_rec_count = 1;
10247 }
10248 else if (1 == insn_op1)
10249 {
10250 /* CPSR is going to be changed. */
10251 record_buf[0] = ARM_PS_REGNUM;
10252 arm_insn_r->reg_rec_count = 1;
10253 }
10254 else if (3 == insn_op1)
10255 {
10256 /* SPSR is going to be changed. */
10257 /* We need to get SPSR value, which is yet to be done. */
10258 return -1;
10259 }
10260 }
10261 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10262 {
10263 if (1 == insn_op1)
10264 {
10265 /* BX. */
10266 record_buf[0] = ARM_PS_REGNUM;
10267 arm_insn_r->reg_rec_count = 1;
10268 }
10269 else if (3 == insn_op1)
10270 {
10271 /* CLZ. */
10272 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10273 arm_insn_r->reg_rec_count = 1;
10274 }
10275 }
10276 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10277 {
10278 /* BLX. */
10279 record_buf[0] = ARM_PS_REGNUM;
10280 record_buf[1] = ARM_LR_REGNUM;
10281 arm_insn_r->reg_rec_count = 2;
10282 }
10283 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10284 {
10285 /* QADD, QSUB, QDADD, QDSUB */
10286 record_buf[0] = ARM_PS_REGNUM;
10287 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10288 arm_insn_r->reg_rec_count = 2;
10289 }
10290 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10291 {
10292 /* BKPT. */
10293 record_buf[0] = ARM_PS_REGNUM;
10294 record_buf[1] = ARM_LR_REGNUM;
10295 arm_insn_r->reg_rec_count = 2;
10296
10297 /* Save SPSR also; how? */
10298 return -1;
10299 }
10300 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10301 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10302 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10303 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10304 )
10305 {
10306 if (0 == insn_op1 || 1 == insn_op1)
10307 {
10308 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10309 /* We don't optimize for SMULW<y>, where only Rd
10310 would need to be recorded. */
10311 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10312 record_buf[1] = ARM_PS_REGNUM;
10313 arm_insn_r->reg_rec_count = 2;
10314 }
10315 else if (2 == insn_op1)
10316 {
10317 /* SMLAL<x><y>. */
10318 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10319 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10320 arm_insn_r->reg_rec_count = 2;
10321 }
10322 else if (3 == insn_op1)
10323 {
10324 /* SMUL<x><y>. */
10325 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10326 arm_insn_r->reg_rec_count = 1;
10327 }
10328 }
10329 }
10330 else
10331 {
10332 /* MSR : immediate form. */
10333 if (1 == insn_op1)
10334 {
10335 /* CPSR is going to be changed. */
10336 record_buf[0] = ARM_PS_REGNUM;
10337 arm_insn_r->reg_rec_count = 1;
10338 }
10339 else if (3 == insn_op1)
10340 {
10341 /* SPSR is going to be changed. */
10342 /* We need to get the SPSR value, which is yet to be done. */
10343 return -1;
10344 }
10345 }
10346 }
10347
10348 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10349 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10350 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10351
10352 /* Handle load/store insn extension space. */
10353
10354 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10355 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10356 && !INSN_RECORDED(arm_insn_r))
10357 {
10358 /* SWP/SWPB. */
10359 if (0 == insn_op1)
10360 {
10361 /* These insns change both registers and memory. */
10362 /* SWP or SWPB insn. */
10363 /* Get memory address given by Rn. */
10364 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10365 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10366 /* SWP insn swaps a word. */
10367 if (8 == arm_insn_r->opcode)
10368 {
10369 record_buf_mem[0] = 4;
10370 }
10371 else
10372 {
10373 /* SWPB insn swaps only a byte. */
10374 record_buf_mem[0] = 1;
10375 }
10376 record_buf_mem[1] = u_regval;
10377 arm_insn_r->mem_rec_count = 1;
10378 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10379 arm_insn_r->reg_rec_count = 1;
10380 }
10381 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10382 {
10383 /* STRH. */
10384 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10385 ARM_RECORD_STRH);
10386 }
10387 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10388 {
10389 /* LDRD. */
10390 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10391 record_buf[1] = record_buf[0] + 1;
10392 arm_insn_r->reg_rec_count = 2;
10393 }
10394 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10395 {
10396 /* STRD. */
10397 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10398 ARM_RECORD_STRD);
10399 }
10400 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10401 {
10402 /* LDRH, LDRSB, LDRSH. */
10403 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10404 arm_insn_r->reg_rec_count = 1;
10405 }
10406
10407 }
10408
10409 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10410 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10411 && !INSN_RECORDED(arm_insn_r))
10412 {
10413 ret = -1;
10414 /* Handle coprocessor insn extension space. */
10415 }
10416
10417 /* To be done for ARMv5 and later; as of now we return -1. */
10418 if (-1 == ret)
10419 return ret;
10420
10421 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10422 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10423
10424 return ret;
10425 }
10426
10427 /* Handling opcode 000 insns. */
10428
10429 static int
10430 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10431 {
10432 struct regcache *reg_cache = arm_insn_r->regcache;
10433 uint32_t record_buf[8], record_buf_mem[8];
10434 ULONGEST u_regval[2] = {0};
10435
10436 uint32_t reg_src1 = 0;
10437 uint32_t opcode1 = 0;
10438
10439 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10440 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10441 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10442
10443 if (!((opcode1 & 0x19) == 0x10))
10444 {
10445 /* Data-processing (register) and data-processing (register-shifted
10446 register). */
10447 /* In all 11 shifter-operand modes, the insn modifies the destination
10448 register, which is specified by bits 12-15 of the insn. */
10449 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10450 record_buf[1] = ARM_PS_REGNUM;
10451 arm_insn_r->reg_rec_count = 2;
10452 }
10453 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10454 {
10455 /* Miscellaneous instructions */
10456
10457 if (3 == arm_insn_r->decode && 0x12 == opcode1
10458 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10459 {
10460 /* Handle BLX, branch and link/exchange. */
10461 if (9 == arm_insn_r->opcode)
10462 {
10463 /* The branch state is chosen by copying bit[0] of Rm into the T bit of CPSR,
10464 and R14 stores the return address. */
10465 record_buf[0] = ARM_PS_REGNUM;
10466 record_buf[1] = ARM_LR_REGNUM;
10467 arm_insn_r->reg_rec_count = 2;
10468 }
10469 }
10470 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10471 {
10472 /* Handle enhanced software breakpoint insn, BKPT. */
10473 /* CPSR is changed so that execution continues in ARM state with normal
10474 interrupts disabled, entering abort mode. */
10475 /* The PC is set according to the high-vector configuration. */
10476 /* If the user hits the breakpoint and then reverses execution,
10477 we need to go back with the previous CPSR and
10478 Program Counter. */
10479 record_buf[0] = ARM_PS_REGNUM;
10480 record_buf[1] = ARM_LR_REGNUM;
10481 arm_insn_r->reg_rec_count = 2;
10482
10483 /* Save SPSR also; how? */
10484 return -1;
10485 }
10486 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10487 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10488 {
10489 /* Handle BX, branch and exchange. */
10490 /* The branch state is chosen by copying bit[0] of Rm into the T bit of CPSR. */
10491 record_buf[0] = ARM_PS_REGNUM;
10492 arm_insn_r->reg_rec_count = 1;
10493 }
10494 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10495 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10496 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10497 {
10498 /* Count leading zeros: CLZ. */
10499 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10500 arm_insn_r->reg_rec_count = 1;
10501 }
10502 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10503 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10504 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10505 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10506 {
10507 /* Handle MRS insn. */
10508 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10509 arm_insn_r->reg_rec_count = 1;
10510 }
10511 }
10512 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10513 {
10514 /* Multiply and multiply-accumulate */
10515
10516 /* Handle multiply instructions. */
10517 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10518 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10519 {
10520 /* Handle MLA and MUL. */
10521 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10522 record_buf[1] = ARM_PS_REGNUM;
10523 arm_insn_r->reg_rec_count = 2;
10524 }
10525 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10526 {
10527 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10528 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10529 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10530 record_buf[2] = ARM_PS_REGNUM;
10531 arm_insn_r->reg_rec_count = 3;
10532 }
10533 }
10534 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10535 {
10536 /* Synchronization primitives */
10537
10538 /* Handling SWP, SWPB. */
10539 /* These insns change both registers and memory. */
10540 /* SWP or SWPB insn. */
10541
10542 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10543 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10544 /* SWP insn swaps a word. */
10545 if (8 == arm_insn_r->opcode)
10546 {
10547 record_buf_mem[0] = 4;
10548 }
10549 else
10550 {
10551 /* SWPB insn swaps only a byte. */
10552 record_buf_mem[0] = 1;
10553 }
10554 record_buf_mem[1] = u_regval[0];
10555 arm_insn_r->mem_rec_count = 1;
10556 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10557 arm_insn_r->reg_rec_count = 1;
10558 }
10559 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10560 || 15 == arm_insn_r->decode)
10561 {
10562 if ((opcode1 & 0x12) == 2)
10563 {
10564 /* Extra load/store (unprivileged) */
10565 return -1;
10566 }
10567 else
10568 {
10569 /* Extra load/store */
10570 switch (bits (arm_insn_r->arm_insn, 5, 6))
10571 {
10572 case 1:
10573 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10574 {
10575 /* STRH (register), STRH (immediate) */
10576 arm_record_strx (arm_insn_r, &record_buf[0],
10577 &record_buf_mem[0], ARM_RECORD_STRH);
10578 }
10579 else if ((opcode1 & 0x05) == 0x1)
10580 {
10581 /* LDRH (register) */
10582 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10583 arm_insn_r->reg_rec_count = 1;
10584
10585 if (bit (arm_insn_r->arm_insn, 21))
10586 {
10587 /* Write back to Rn. */
10588 record_buf[arm_insn_r->reg_rec_count++]
10589 = bits (arm_insn_r->arm_insn, 16, 19);
10590 }
10591 }
10592 else if ((opcode1 & 0x05) == 0x5)
10593 {
10594 /* LDRH (immediate), LDRH (literal) */
10595 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10596
10597 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10598 arm_insn_r->reg_rec_count = 1;
10599
10600 if (rn != 15)
10601 {
10602 /* LDRH (immediate) */
10603 if (bit (arm_insn_r->arm_insn, 21))
10604 {
10605 /* Write back to Rn. */
10606 record_buf[arm_insn_r->reg_rec_count++] = rn;
10607 }
10608 }
10609 }
10610 else
10611 return -1;
10612 break;
10613 case 2:
10614 if ((opcode1 & 0x05) == 0x0)
10615 {
10616 /* LDRD (register) */
10617 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10618 record_buf[1] = record_buf[0] + 1;
10619 arm_insn_r->reg_rec_count = 2;
10620
10621 if (bit (arm_insn_r->arm_insn, 21))
10622 {
10623 /* Write back to Rn. */
10624 record_buf[arm_insn_r->reg_rec_count++]
10625 = bits (arm_insn_r->arm_insn, 16, 19);
10626 }
10627 }
10628 else if ((opcode1 & 0x05) == 0x1)
10629 {
10630 /* LDRSB (register) */
10631 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10632 arm_insn_r->reg_rec_count = 1;
10633
10634 if (bit (arm_insn_r->arm_insn, 21))
10635 {
10636 /* Write back to Rn. */
10637 record_buf[arm_insn_r->reg_rec_count++]
10638 = bits (arm_insn_r->arm_insn, 16, 19);
10639 }
10640 }
10641 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10642 {
10643 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10644 LDRSB (literal) */
10645 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10646
10647 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10648 arm_insn_r->reg_rec_count = 1;
10649
10650 if (rn != 15)
10651 {
10652 /* LDRD (immediate), LDRSB (immediate) */
10653 if (bit (arm_insn_r->arm_insn, 21))
10654 {
10655 /* Write back to Rn. */
10656 record_buf[arm_insn_r->reg_rec_count++] = rn;
10657 }
10658 }
10659 }
10660 else
10661 return -1;
10662 break;
10663 case 3:
10664 if ((opcode1 & 0x05) == 0x0)
10665 {
10666 /* STRD (register) */
10667 arm_record_strx (arm_insn_r, &record_buf[0],
10668 &record_buf_mem[0], ARM_RECORD_STRD);
10669 }
10670 else if ((opcode1 & 0x05) == 0x1)
10671 {
10672 /* LDRSH (register) */
10673 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10674 arm_insn_r->reg_rec_count = 1;
10675
10676 if (bit (arm_insn_r->arm_insn, 21))
10677 {
10678 /* Write back to Rn. */
10679 record_buf[arm_insn_r->reg_rec_count++]
10680 = bits (arm_insn_r->arm_insn, 16, 19);
10681 }
10682 }
10683 else if ((opcode1 & 0x05) == 0x4)
10684 {
10685 /* STRD (immediate) */
10686 arm_record_strx (arm_insn_r, &record_buf[0],
10687 &record_buf_mem[0], ARM_RECORD_STRD);
10688 }
10689 else if ((opcode1 & 0x05) == 0x5)
10690 {
10691 /* LDRSH (immediate), LDRSH (literal) */
10692 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10693 arm_insn_r->reg_rec_count = 1;
10694
10695 if (bit (arm_insn_r->arm_insn, 21))
10696 {
10697 /* Write back to Rn. */
10698 record_buf[arm_insn_r->reg_rec_count++]
10699 = bits (arm_insn_r->arm_insn, 16, 19);
10700 }
10701 }
10702 else
10703 return -1;
10704 break;
10705 default:
10706 return -1;
10707 }
10708 }
10709 }
10710 else
10711 {
10712 return -1;
10713 }
10714
10715 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10716 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10717 return 0;
10718 }
10719
10720 /* Handling opcode 001 insns. */
10721
10722 static int
10723 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10724 {
10725 uint32_t record_buf[8], record_buf_mem[8];
10726
10727 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10728 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10729
10730 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10731 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10732 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10733 )
10734 {
10735 /* Handle MSR insn. */
10736 if (9 == arm_insn_r->opcode)
10737 {
10738 /* CPSR is going to be changed. */
10739 record_buf[0] = ARM_PS_REGNUM;
10740 arm_insn_r->reg_rec_count = 1;
10741 }
10742 else
10743 {
10744 /* SPSR is going to be changed. */
10745 }
10746 }
10747 else if (arm_insn_r->opcode <= 15)
10748 {
10749 /* Normal data processing insns. */
10750 /* In all 11 shifter operand modes, the insn modifies the destination
10751 register, which is encoded in bits 12-15.  */
10752 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10753 record_buf[1] = ARM_PS_REGNUM;
10754 arm_insn_r->reg_rec_count = 2;
10755 }
10756 else
10757 {
10758 return -1;
10759 }
10760
10761 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10762 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10763 return 0;
10764 }
10765
10766 static int
10767 arm_record_media (insn_decode_record *arm_insn_r)
10768 {
10769 uint32_t record_buf[8];
10770
10771 switch (bits (arm_insn_r->arm_insn, 22, 24))
10772 {
10773 case 0:
10774 /* Parallel addition and subtraction, signed */
10775 case 1:
10776 /* Parallel addition and subtraction, unsigned */
10777 case 2:
10778 case 3:
10779 /* Packing, unpacking, saturation and reversal */
10780 {
10781 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10782
10783 record_buf[arm_insn_r->reg_rec_count++] = rd;
10784 }
10785 break;
10786
10787 case 4:
10788 case 5:
10789 /* Signed multiplies */
10790 {
10791 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10792 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10793
10794 record_buf[arm_insn_r->reg_rec_count++] = rd;
10795 if (op1 == 0x0)
10796 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10797 else if (op1 == 0x4)
10798 record_buf[arm_insn_r->reg_rec_count++]
10799 = bits (arm_insn_r->arm_insn, 12, 15);
10800 }
10801 break;
10802
10803 case 6:
10804 {
10805 if (bit (arm_insn_r->arm_insn, 21)
10806 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10807 {
10808 /* SBFX */
10809 record_buf[arm_insn_r->reg_rec_count++]
10810 = bits (arm_insn_r->arm_insn, 12, 15);
10811 }
10812 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10813 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10814 {
10815 /* USAD8 and USADA8 */
10816 record_buf[arm_insn_r->reg_rec_count++]
10817 = bits (arm_insn_r->arm_insn, 16, 19);
10818 }
10819 }
10820 break;
10821
10822 case 7:
10823 {
10824 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10825 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10826 {
10827 /* Permanently UNDEFINED */
10828 return -1;
10829 }
10830 else
10831 {
10832 /* BFC, BFI and UBFX */
10833 record_buf[arm_insn_r->reg_rec_count++]
10834 = bits (arm_insn_r->arm_insn, 12, 15);
10835 }
10836 }
10837 break;
10838
10839 default:
10840 return -1;
10841 }
10842
10843 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10844
10845 return 0;
10846 }
10847
10848 /* Handle ARM mode instructions with opcode 010. */
10849
10850 static int
10851 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10852 {
10853 struct regcache *reg_cache = arm_insn_r->regcache;
10854
10855 uint32_t reg_base, reg_dest;
10856 uint32_t offset_12, tgt_mem_addr;
10857 uint32_t record_buf[8], record_buf_mem[8];
10858 unsigned char wback;
10859 ULONGEST u_regval;
10860
10861 /* Calculate wback. */
10862 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10863 || (bit (arm_insn_r->arm_insn, 21) == 1);
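/* wback is true for post-indexed addressing (P bit 24 clear) or when
   writeback is requested (W bit 21 set); in both cases the base register
   is modified by the insn and therefore has to be recorded.  */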
10864
10865 arm_insn_r->reg_rec_count = 0;
10866 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10867
10868 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10869 {
10870 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10871 and LDRT. */
10872
10873 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10874 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10875
10876 /* The LDR instruction is capable of doing branching.  If MOV LR, PC
10877 precedes an LDR instruction that has R15 as its destination, the
10878 pair emulates a branch and link instruction, and hence we need to
10879 save CPSR and PC as well. */
10880 if (ARM_PC_REGNUM == reg_dest)
10881 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10882
10883 /* If wback is true, also save the base register, which is going to be
10884 written to. */
10885 if (wback)
10886 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10887 }
10888 else
10889 {
10890 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10891
10892 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10893 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10894
10895 /* Handle bit U. */
10896 if (bit (arm_insn_r->arm_insn, 23))
10897 {
10898 /* U == 1: Add the offset. */
10899 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10900 }
10901 else
10902 {
10903 /* U == 0: subtract the offset. */
10904 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10905 }
10906
10907 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10908 bytes. */
10909 if (bit (arm_insn_r->arm_insn, 22))
10910 {
10911 /* STRB and STRBT: 1 byte. */
10912 record_buf_mem[0] = 1;
10913 }
10914 else
10915 {
10916 /* STR and STRT: 4 bytes. */
10917 record_buf_mem[0] = 4;
10918 }
10919
10920 /* Handle bit P. */
10921 if (bit (arm_insn_r->arm_insn, 24))
10922 record_buf_mem[1] = tgt_mem_addr;
10923 else
10924 record_buf_mem[1] = (uint32_t) u_regval;
10925
10926 arm_insn_r->mem_rec_count = 1;
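/* Worked example (hypothetical register values): for "str r1, [r2, #8]"
   with r2 = 0x1000, P = 1 and U = 1, the four bytes at 0x1008 are
   recorded; for the post-indexed form "str r1, [r2], #8" (P = 0) the
   bytes at the unmodified base 0x1000 are recorded instead, along with
   the written-back base r2.  */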
10927
10928 /* If wback is true, also save the base register, which is going to be
10929 written to. */
10930 if (wback)
10931 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10932 }
10933
10934 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10935 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10936 return 0;
10937 }
10938
10939 /* Handling opcode 011 insns. */
10940
10941 static int
10942 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10943 {
10944 struct regcache *reg_cache = arm_insn_r->regcache;
10945
10946 uint32_t shift_imm = 0;
10947 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10948 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10949 uint32_t record_buf[8], record_buf_mem[8];
10950
10951 LONGEST s_word;
10952 ULONGEST u_regval[2];
10953
10954 if (bit (arm_insn_r->arm_insn, 4))
10955 return arm_record_media (arm_insn_r);
10956
10957 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10958 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10959
10960 /* Handle enhanced store insns and the LDRD DSP insn; the ordering below
10961 follows the addressing modes of the store insns, beginning with the
10962 STRH insn.  */
10963
10964 /* LDR or STR? */
10965 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10966 {
10967 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10968 /* The LDR insn is capable of branching: if MOV LR, PC precedes an
10969 LDR insn that has R15 as its destination, the pair emulates a
10970 branch and link insn, and hence we need to save CPSR and PC as
10971 well.  */
10972 if (15 != reg_dest)
10973 {
10974 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10975 arm_insn_r->reg_rec_count = 1;
10976 }
10977 else
10978 {
10979 record_buf[0] = reg_dest;
10980 record_buf[1] = ARM_PS_REGNUM;
10981 arm_insn_r->reg_rec_count = 2;
10982 }
10983 }
10984 else
10985 {
10986 if (! bits (arm_insn_r->arm_insn, 4, 11))
10987 {
10988 /* Store insn, register offset and register pre-indexed,
10989 register post-indexed. */
10990 /* Get Rm. */
10991 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10992 /* Get Rn. */
10993 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10994 regcache_raw_read_unsigned (reg_cache, reg_src1,
10995 &u_regval[0]);
10996 regcache_raw_read_unsigned (reg_cache, reg_src2,
10997 &u_regval[1]);
10998 if (15 == reg_src2)
10999 {
11000 /* If R15 was used as Rn, the value read is the current PC + 8.  */
11001 /* Pre-indexed mode doesn't reach here; that would be an illegal insn.  */
11002 u_regval[1] = u_regval[1] + 8;
11003 }
11004 /* Calculate target store address, Rn +/- Rm, register offset. */
11005 /* U == 1. */
11006 if (bit (arm_insn_r->arm_insn, 23))
11007 {
11008 tgt_mem_addr = u_regval[0] + u_regval[1];
11009 }
11010 else
11011 {
11012 tgt_mem_addr = u_regval[1] - u_regval[0];
11013 }
11014
11015 switch (arm_insn_r->opcode)
11016 {
11017 /* STR. */
11018 case 8:
11019 case 12:
11020 /* STR. */
11021 case 9:
11022 case 13:
11023 /* STRT. */
11024 case 1:
11025 case 5:
11026 /* STR. */
11027 case 0:
11028 case 4:
11029 record_buf_mem[0] = 4;
11030 break;
11031
11032 /* STRB. */
11033 case 10:
11034 case 14:
11035 /* STRB. */
11036 case 11:
11037 case 15:
11038 /* STRBT. */
11039 case 3:
11040 case 7:
11041 /* STRB. */
11042 case 2:
11043 case 6:
11044 record_buf_mem[0] = 1;
11045 break;
11046
11047 default:
11048 gdb_assert_not_reached ("no decoding pattern found");
11049 break;
11050 }
11051 record_buf_mem[1] = tgt_mem_addr;
11052 arm_insn_r->mem_rec_count = 1;
11053
11054 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11055 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11056 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11057 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11058 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11059 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11060 )
11061 {
11062 /* Rn is going to be changed in pre-indexed mode and
11063 post-indexed mode as well. */
11064 record_buf[0] = reg_src2;
11065 arm_insn_r->reg_rec_count = 1;
11066 }
11067 }
11068 else
11069 {
11070 /* Store insn, scaled register offset; scaled pre-indexed. */
11071 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11072 /* Get Rm. */
11073 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11074 /* Get Rn. */
11075 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11076 /* Get shift_imm. */
11077 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11078 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11079 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11080 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11081 /* Bits 5-6 select the shift type; offset_12 is reused below for the
computed offset.  */
11082 switch (offset_12)
11083 {
11084 case 0:
11085 /* LSL: offset is Rm logically shifted left by shift_imm.  */
11086 offset_12 = u_regval[0] << shift_imm;
11087 break;
11088
11089 case 1:
11090 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11091 break;
11092
11093 case 2:
11094 if (!shift_imm)
11095 {
11096 if (bit (u_regval[0], 31))
11097 {
11098 offset_12 = 0xFFFFFFFF;
11099 }
11100 else
11101 {
11102 offset_12 = 0;
11103 }
11104 }
11105 else
11106 {
11107 /* This is arithmetic shift. */
11108 offset_12 = s_word >> shift_imm;
11109 }
11110 break;
11111
11112 case 3:
11113 if (!shift_imm)
11114 {
11115 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11116 &u_regval[1]);
11117 /* Get C flag value and shift it by 31. */
11118 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11119 | (u_regval[0]) >> 1);
11120 }
11121 else
11122 {
11123 /* ROR: rotate Rm right by shift_imm bits.  */
11124 offset_12 = ((u_regval[0] >> shift_imm)
11125 | (u_regval[0] << (32 - shift_imm)));
11126 }
11127 break;
11128
11129 default:
11130 gdb_assert_not_reached ("no decoding pattern found");
11131 break;
11132 }
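/* Worked example (hypothetical values): with Rm = 0x10 and shift_imm = 2,
   LSL yields an offset of 0x40 and LSR yields 0x4; for ROR the offset is
   the value of Rm rotated right by shift_imm bits.  */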
11133
11134 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11135 /* bit U set. */
11136 if (bit (arm_insn_r->arm_insn, 23))
11137 {
11138 tgt_mem_addr = u_regval[1] + offset_12;
11139 }
11140 else
11141 {
11142 tgt_mem_addr = u_regval[1] - offset_12;
11143 }
11144
11145 switch (arm_insn_r->opcode)
11146 {
11147 /* STR. */
11148 case 8:
11149 case 12:
11150 /* STR. */
11151 case 9:
11152 case 13:
11153 /* STRT. */
11154 case 1:
11155 case 5:
11156 /* STR. */
11157 case 0:
11158 case 4:
11159 record_buf_mem[0] = 4;
11160 break;
11161
11162 /* STRB. */
11163 case 10:
11164 case 14:
11165 /* STRB. */
11166 case 11:
11167 case 15:
11168 /* STRBT. */
11169 case 3:
11170 case 7:
11171 /* STRB. */
11172 case 2:
11173 case 6:
11174 record_buf_mem[0] = 1;
11175 break;
11176
11177 default:
11178 gdb_assert_not_reached ("no decoding pattern found");
11179 break;
11180 }
11181 record_buf_mem[1] = tgt_mem_addr;
11182 arm_insn_r->mem_rec_count = 1;
11183
11184 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11185 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11186 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11187 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11188 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11189 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11190 )
11191 {
11192 /* Rn is going to be changed in register scaled pre-indexed
11193 mode and scaled post-indexed mode.  */
11194 record_buf[0] = reg_src2;
11195 arm_insn_r->reg_rec_count = 1;
11196 }
11197 }
11198 }
11199
11200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11201 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11202 return 0;
11203 }
11204
11205 /* Handle ARM mode instructions with opcode 100. */
11206
11207 static int
11208 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11209 {
11210 struct regcache *reg_cache = arm_insn_r->regcache;
11211 uint32_t register_count = 0, register_bits;
11212 uint32_t reg_base, addr_mode;
11213 uint32_t record_buf[24], record_buf_mem[48];
11214 uint32_t wback;
11215 ULONGEST u_regval;
11216
11217 /* Fetch the list of registers. */
11218 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11219 arm_insn_r->reg_rec_count = 0;
11220
11221 /* Fetch the base register that contains the address we are loading data
11222 to. */
11223 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11224
11225 /* Calculate wback. */
11226 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11227
11228 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11229 {
11230 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11231
11232 /* Find out which registers are going to be loaded from memory. */
11233 while (register_bits)
11234 {
11235 if (register_bits & 0x00000001)
11236 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11237 register_bits = register_bits >> 1;
11238 register_count++;
11239 }
11240
11241
11242 /* If wback is true, also save the base register, which is going to be
11243 written to. */
11244 if (wback)
11245 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11246
11247 /* Save the CPSR register. */
11248 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
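/* For example, "ldmia r0!, {r1-r3}" records r1-r3 (the loaded registers),
   r0 (the written-back base) and CPSR.  */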
11249 }
11250 else
11251 {
11252 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11253
11254 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11255
11256 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11257
11258 /* Find out how many registers are going to be stored to memory. */
11259 while (register_bits)
11260 {
11261 if (register_bits & 0x00000001)
11262 register_count++;
11263 register_bits = register_bits >> 1;
11264 }
11265
11266 switch (addr_mode)
11267 {
11268 /* STMDA (STMED): Decrement after. */
11269 case 0:
11270 record_buf_mem[1] = (uint32_t) u_regval
11271 - register_count * ARM_INT_REGISTER_SIZE + 4;
11272 break;
11273 /* STM (STMIA, STMEA): Increment after. */
11274 case 1:
11275 record_buf_mem[1] = (uint32_t) u_regval;
11276 break;
11277 /* STMDB (STMFD): Decrement before. */
11278 case 2:
11279 record_buf_mem[1] = (uint32_t) u_regval
11280 - register_count * ARM_INT_REGISTER_SIZE;
11281 break;
11282 /* STMIB (STMFA): Increment before. */
11283 case 3:
11284 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11285 break;
11286 default:
11287 gdb_assert_not_reached ("no decoding pattern found");
11288 break;
11289 }
11290
11291 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11292 arm_insn_r->mem_rec_count = 1;
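/* For example (hypothetical values), "stmdb r0!, {r1-r3}" with r0 = 0x2000
   stores three words below the base, so the 12 bytes starting at 0x1FF4
   are recorded, together with the written-back base r0.  */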
11293
11294 /* If wback is true, also save the base register, which is going to be
11295 written to. */
11296 if (wback)
11297 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11298 }
11299
11300 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11301 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11302 return 0;
11303 }
11304
11305 /* Handling opcode 101 insns. */
11306
11307 static int
11308 arm_record_b_bl (insn_decode_record *arm_insn_r)
11309 {
11310 uint32_t record_buf[8];
11311
11312 /* Handle B, BL, BLX(1) insns. */
11313 /* B simply branches so we do nothing here. */
11314 /* Note: BLX(1) doesn't fall here; instead it falls into the
11315 extension space.  */
11316 if (bit (arm_insn_r->arm_insn, 24))
11317 {
11318 record_buf[0] = ARM_LR_REGNUM;
11319 arm_insn_r->reg_rec_count = 1;
11320 }
11321
11322 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11323
11324 return 0;
11325 }
11326
11327 static int
11328 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11329 {
11330 printf_unfiltered (_("Process record does not support instruction "
11331 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11332 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11333
11334 return -1;
11335 }
11336
11337 /* Record handler for vector data transfer instructions. */
11338
11339 static int
11340 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11341 {
11342 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11343 uint32_t record_buf[4];
11344
11345 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11346 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11347 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11348 bit_l = bit (arm_insn_r->arm_insn, 20);
11349 bit_c = bit (arm_insn_r->arm_insn, 8);
11350
11351 /* Handle VMOV instruction. */
11352 if (bit_l && bit_c)
11353 {
11354 record_buf[0] = reg_t;
11355 arm_insn_r->reg_rec_count = 1;
11356 }
11357 else if (bit_l && !bit_c)
11358 {
11359 /* Handle VMOV instruction. */
11360 if (bits_a == 0x00)
11361 {
11362 record_buf[0] = reg_t;
11363 arm_insn_r->reg_rec_count = 1;
11364 }
11365 /* Handle VMRS instruction. */
11366 else if (bits_a == 0x07)
11367 {
11368 if (reg_t == 15)
11369 reg_t = ARM_PS_REGNUM;
11370
11371 record_buf[0] = reg_t;
11372 arm_insn_r->reg_rec_count = 1;
11373 }
11374 }
11375 else if (!bit_l && !bit_c)
11376 {
11377 /* Handle VMOV instruction. */
11378 if (bits_a == 0x00)
11379 {
11380 record_buf[0] = ARM_D0_REGNUM + reg_v;
11381
11382 arm_insn_r->reg_rec_count = 1;
11383 }
11384 /* Handle VMSR instruction. */
11385 else if (bits_a == 0x07)
11386 {
11387 record_buf[0] = ARM_FPSCR_REGNUM;
11388 arm_insn_r->reg_rec_count = 1;
11389 }
11390 }
11391 else if (!bit_l && bit_c)
11392 {
11393 /* Handle VMOV instruction. */
11394 if (!(bits_a & 0x04))
11395 {
11396 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11397 + ARM_D0_REGNUM;
11398 arm_insn_r->reg_rec_count = 1;
11399 }
11400 /* Handle VDUP instruction. */
11401 else
11402 {
11403 if (bit (arm_insn_r->arm_insn, 21))
11404 {
11405 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11406 record_buf[0] = reg_v + ARM_D0_REGNUM;
11407 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11408 arm_insn_r->reg_rec_count = 2;
11409 }
11410 else
11411 {
11412 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11413 record_buf[0] = reg_v + ARM_D0_REGNUM;
11414 arm_insn_r->reg_rec_count = 1;
11415 }
11416 }
11417 }
11418
11419 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11420 return 0;
11421 }
11422
11423 /* Record handler for extension register load/store instructions. */
11424
11425 static int
11426 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11427 {
11428 uint32_t opcode, single_reg;
11429 uint8_t op_vldm_vstm;
11430 uint32_t record_buf[8], record_buf_mem[128];
11431 ULONGEST u_regval = 0;
11432
11433 struct regcache *reg_cache = arm_insn_r->regcache;
11434
11435 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11436 single_reg = !bit (arm_insn_r->arm_insn, 8);
11437 op_vldm_vstm = opcode & 0x1b;
11438
11439 /* Handle VMOV instructions. */
11440 if ((opcode & 0x1e) == 0x04)
11441 {
11442 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11443 {
11444 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11445 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11446 arm_insn_r->reg_rec_count = 2;
11447 }
11448 else
11449 {
11450 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11451 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11452
11453 if (single_reg)
11454 {
11455 /* The first S register number m is REG_M:M (M is bit 5),
11456 the corresponding D register number is REG_M:M / 2, which
11457 is REG_M. */
11458 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11459 /* The second S register number is REG_M:M + 1, the
11460 corresponding D register number is (REG_M:M + 1) / 2.
11461 IOW, if bit M is 1, the first and second S registers
11462 are mapped to different D registers, otherwise, they are
11463 in the same D register. */
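/* For example (values chosen for illustration): with REG_M = 3 and M = 1
   the source registers are S7 and S8, which live in D3 and D4, so two
   D registers are recorded; with M = 0 they are S6 and S7, both in D3.  */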
11464 if (bit_m)
11465 {
11466 record_buf[arm_insn_r->reg_rec_count++]
11467 = ARM_D0_REGNUM + reg_m + 1;
11468 }
11469 }
11470 else
11471 {
11472 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11473 arm_insn_r->reg_rec_count = 1;
11474 }
11475 }
11476 }
11477 /* Handle VSTM and VPUSH instructions. */
11478 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11479 || op_vldm_vstm == 0x12)
11480 {
11481 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11482 uint32_t memory_index = 0;
11483
11484 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11485 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11486 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11487 imm_off32 = imm_off8 << 2;
11488 memory_count = imm_off8;
11489
11490 if (bit (arm_insn_r->arm_insn, 23))
11491 start_address = u_regval;
11492 else
11493 start_address = u_regval - imm_off32;
11494
11495 if (bit (arm_insn_r->arm_insn, 21))
11496 {
11497 record_buf[0] = reg_rn;
11498 arm_insn_r->reg_rec_count = 1;
11499 }
11500
11501 while (memory_count > 0)
11502 {
11503 if (single_reg)
11504 {
11505 record_buf_mem[memory_index] = 4;
11506 record_buf_mem[memory_index + 1] = start_address;
11507 start_address = start_address + 4;
11508 memory_index = memory_index + 2;
11509 }
11510 else
11511 {
11512 record_buf_mem[memory_index] = 4;
11513 record_buf_mem[memory_index + 1] = start_address;
11514 record_buf_mem[memory_index + 2] = 4;
11515 record_buf_mem[memory_index + 3] = start_address + 4;
11516 start_address = start_address + 8;
11517 memory_index = memory_index + 4;
11518 }
11519 memory_count--;
11520 }
11521 arm_insn_r->mem_rec_count = (memory_index >> 1);
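/* For example (hypothetical values), a single-precision
   "vstmdb r0!, {s0-s1}" has imm8 = 2, so with r0 = 0x100 and U = 0 the
   eight bytes starting at 0x100 - 8 = 0xF8 are recorded as two four-byte
   entries, together with the written-back base r0.  */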
11522 }
11523 /* Handle VLDM instructions. */
11524 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11525 || op_vldm_vstm == 0x13)
11526 {
11527 uint32_t reg_count, reg_vd;
11528 uint32_t reg_index = 0;
11529 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11530
11531 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11532 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11533
11534 /* REG_VD is the first D register number. If the instruction
11535 loads memory to S registers (SINGLE_REG is TRUE), the register
11536 number is (REG_VD << 1 | bit D), so the corresponding D
11537 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11538 if (!single_reg)
11539 reg_vd = reg_vd | (bit_d << 4);
11540
11541 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11542 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11543
11544 /* If the instruction loads memory to D registers, REG_COUNT should
11545 be divided by 2, according to the ARM Architecture Reference
11546 Manual.  If the instruction loads memory to S registers, divide by
11547 2 as well because two S registers are mapped to one D register.  */
11548 reg_count = reg_count / 2;
11549 if (single_reg && bit_d)
11550 {
11551 /* Increase the register count if S register list starts from
11552 an odd number (bit d is one). */
11553 reg_count++;
11554 }
11555
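/* For example, "vldmia r0, {s3-s7}" has imm8 = 5, REG_VD = 1 and bit D = 1;
   5 / 2 = 2 plus the odd-start adjustment gives 3, so D1, D2 and D3
   (which cover S3-S7) are recorded.  */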
11556 while (reg_count > 0)
11557 {
11558 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11559 reg_count--;
11560 }
11561 arm_insn_r->reg_rec_count = reg_index;
11562 }
11563 /* VSTR Vector store register. */
11564 else if ((opcode & 0x13) == 0x10)
11565 {
11566 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11567 uint32_t memory_index = 0;
11568
11569 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11570 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11571 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11572 imm_off32 = imm_off8 << 2;
11573
11574 if (bit (arm_insn_r->arm_insn, 23))
11575 start_address = u_regval + imm_off32;
11576 else
11577 start_address = u_regval - imm_off32;
11578
11579 if (single_reg)
11580 {
11581 record_buf_mem[memory_index] = 4;
11582 record_buf_mem[memory_index + 1] = start_address;
11583 arm_insn_r->mem_rec_count = 1;
11584 }
11585 else
11586 {
11587 record_buf_mem[memory_index] = 4;
11588 record_buf_mem[memory_index + 1] = start_address;
11589 record_buf_mem[memory_index + 2] = 4;
11590 record_buf_mem[memory_index + 3] = start_address + 4;
11591 arm_insn_r->mem_rec_count = 2;
11592 }
11593 }
11594 /* VLDR Vector load register. */
11595 else if ((opcode & 0x13) == 0x11)
11596 {
11597 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11598
11599 if (!single_reg)
11600 {
11601 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11602 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11603 }
11604 else
11605 {
11606 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11607 /* Record register D rather than pseudo register S. */
11608 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11609 }
11610 arm_insn_r->reg_rec_count = 1;
11611 }
11612
11613 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11614 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11615 return 0;
11616 }
11617
11618 /* Record handler for arm/thumb mode VFP data processing instructions. */
11619
11620 static int
11621 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11622 {
11623 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11624 uint32_t record_buf[4];
11625 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11626 enum insn_types curr_insn_type = INSN_INV;
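/* The INSN_Tx types classify the destination written by the insn: INSN_T0
   records a pair of consecutive D registers, INSN_T1 a double-precision
   destination (one D register), INSN_T2 a single-precision destination,
   and INSN_T3 only FPSCR (compare insns).  See the switch at the end of
   this function.  */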
11627
11628 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11629 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11630 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11631 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11632 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11633 bit_d = bit (arm_insn_r->arm_insn, 22);
11634 /* Mask off the "D" bit. */
11635 opc1 = opc1 & ~0x04;
11636
11637 /* Handle VMLA, VMLS. */
11638 if (opc1 == 0x00)
11639 {
11640 if (bit (arm_insn_r->arm_insn, 10))
11641 {
11642 if (bit (arm_insn_r->arm_insn, 6))
11643 curr_insn_type = INSN_T0;
11644 else
11645 curr_insn_type = INSN_T1;
11646 }
11647 else
11648 {
11649 if (dp_op_sz)
11650 curr_insn_type = INSN_T1;
11651 else
11652 curr_insn_type = INSN_T2;
11653 }
11654 }
11655 /* Handle VNMLA, VNMLS, VNMUL. */
11656 else if (opc1 == 0x01)
11657 {
11658 if (dp_op_sz)
11659 curr_insn_type = INSN_T1;
11660 else
11661 curr_insn_type = INSN_T2;
11662 }
11663 /* Handle VMUL. */
11664 else if (opc1 == 0x02 && !(opc3 & 0x01))
11665 {
11666 if (bit (arm_insn_r->arm_insn, 10))
11667 {
11668 if (bit (arm_insn_r->arm_insn, 6))
11669 curr_insn_type = INSN_T0;
11670 else
11671 curr_insn_type = INSN_T1;
11672 }
11673 else
11674 {
11675 if (dp_op_sz)
11676 curr_insn_type = INSN_T1;
11677 else
11678 curr_insn_type = INSN_T2;
11679 }
11680 }
11681 /* Handle VADD, VSUB. */
11682 else if (opc1 == 0x03)
11683 {
11684 if (!bit (arm_insn_r->arm_insn, 9))
11685 {
11686 if (bit (arm_insn_r->arm_insn, 6))
11687 curr_insn_type = INSN_T0;
11688 else
11689 curr_insn_type = INSN_T1;
11690 }
11691 else
11692 {
11693 if (dp_op_sz)
11694 curr_insn_type = INSN_T1;
11695 else
11696 curr_insn_type = INSN_T2;
11697 }
11698 }
11699 /* Handle VDIV. */
11700 else if (opc1 == 0x08)
11701 {
11702 if (dp_op_sz)
11703 curr_insn_type = INSN_T1;
11704 else
11705 curr_insn_type = INSN_T2;
11706 }
11707 /* Handle all other vfp data processing instructions. */
11708 else if (opc1 == 0x0b)
11709 {
11710 /* Handle VMOV. */
11711 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11712 {
11713 if (bit (arm_insn_r->arm_insn, 4))
11714 {
11715 if (bit (arm_insn_r->arm_insn, 6))
11716 curr_insn_type = INSN_T0;
11717 else
11718 curr_insn_type = INSN_T1;
11719 }
11720 else
11721 {
11722 if (dp_op_sz)
11723 curr_insn_type = INSN_T1;
11724 else
11725 curr_insn_type = INSN_T2;
11726 }
11727 }
11728 /* Handle VNEG and VABS. */
11729 else if ((opc2 == 0x01 && opc3 == 0x01)
11730 || (opc2 == 0x00 && opc3 == 0x03))
11731 {
11732 if (!bit (arm_insn_r->arm_insn, 11))
11733 {
11734 if (bit (arm_insn_r->arm_insn, 6))
11735 curr_insn_type = INSN_T0;
11736 else
11737 curr_insn_type = INSN_T1;
11738 }
11739 else
11740 {
11741 if (dp_op_sz)
11742 curr_insn_type = INSN_T1;
11743 else
11744 curr_insn_type = INSN_T2;
11745 }
11746 }
11747 /* Handle VSQRT. */
11748 else if (opc2 == 0x01 && opc3 == 0x03)
11749 {
11750 if (dp_op_sz)
11751 curr_insn_type = INSN_T1;
11752 else
11753 curr_insn_type = INSN_T2;
11754 }
11755 /* Handle VCVT. */
11756 else if (opc2 == 0x07 && opc3 == 0x03)
11757 {
11758 if (!dp_op_sz)
11759 curr_insn_type = INSN_T1;
11760 else
11761 curr_insn_type = INSN_T2;
11762 }
11763 else if (opc3 & 0x01)
11764 {
11765 /* Handle VCVT. */
11766 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11767 {
11768 if (!bit (arm_insn_r->arm_insn, 18))
11769 curr_insn_type = INSN_T2;
11770 else
11771 {
11772 if (dp_op_sz)
11773 curr_insn_type = INSN_T1;
11774 else
11775 curr_insn_type = INSN_T2;
11776 }
11777 }
11778 /* Handle VCVT. */
11779 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11780 {
11781 if (dp_op_sz)
11782 curr_insn_type = INSN_T1;
11783 else
11784 curr_insn_type = INSN_T2;
11785 }
11786 /* Handle VCVTB, VCVTT. */
11787 else if ((opc2 & 0x0e) == 0x02)
11788 curr_insn_type = INSN_T2;
11789 /* Handle VCMP, VCMPE. */
11790 else if ((opc2 & 0x0e) == 0x04)
11791 curr_insn_type = INSN_T3;
11792 }
11793 }
11794
11795 switch (curr_insn_type)
11796 {
11797 case INSN_T0:
11798 reg_vd = reg_vd | (bit_d << 4);
11799 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11800 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11801 arm_insn_r->reg_rec_count = 2;
11802 break;
11803
11804 case INSN_T1:
11805 reg_vd = reg_vd | (bit_d << 4);
11806 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11807 arm_insn_r->reg_rec_count = 1;
11808 break;
11809
11810 case INSN_T2:
11811 reg_vd = (reg_vd << 1) | bit_d;
11812 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11813 arm_insn_r->reg_rec_count = 1;
11814 break;
11815
11816 case INSN_T3:
11817 record_buf[0] = ARM_FPSCR_REGNUM;
11818 arm_insn_r->reg_rec_count = 1;
11819 break;
11820
11821 default:
11822 gdb_assert_not_reached ("no decoding pattern found");
11823 break;
11824 }
11825
11826 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11827 return 0;
11828 }
11829
11830 /* Handling opcode 110 insns. */
11831
11832 static int
11833 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11834 {
11835 uint32_t op1, op1_ebit, coproc;
11836
11837 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11838 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11839 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11840
11841 if ((coproc & 0x0e) == 0x0a)
11842 {
11843 /* Handle extension register ld/st instructions. */
11844 if (!(op1 & 0x20))
11845 return arm_record_exreg_ld_st_insn (arm_insn_r);
11846
11847 /* 64-bit transfers between arm core and extension registers. */
11848 if ((op1 & 0x3e) == 0x04)
11849 return arm_record_exreg_ld_st_insn (arm_insn_r);
11850 }
11851 else
11852 {
11853 /* Handle coprocessor ld/st instructions. */
11854 if (!(op1 & 0x3a))
11855 {
11856 /* Store. */
11857 if (!op1_ebit)
11858 return arm_record_unsupported_insn (arm_insn_r);
11859 else
11860 /* Load. */
11861 return arm_record_unsupported_insn (arm_insn_r);
11862 }
11863
11864 /* Move to coprocessor from two arm core registers. */
11865 if (op1 == 0x4)
11866 return arm_record_unsupported_insn (arm_insn_r);
11867
11868 /* Move to two arm core registers from coprocessor. */
11869 if (op1 == 0x5)
11870 {
11871 uint32_t reg_t[2];
11872
11873 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11874 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11875 arm_insn_r->reg_rec_count = 2;
11876
11877 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11878 return 0;
11879 }
11880 }
11881 return arm_record_unsupported_insn (arm_insn_r);
11882 }
11883
11884 /* Handling opcode 111 insns. */
11885
11886 static int
11887 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11888 {
11889 uint32_t op, op1_ebit, coproc, bits_24_25;
11890 arm_gdbarch_tdep *tdep
11891 = (arm_gdbarch_tdep *) gdbarch_tdep (arm_insn_r->gdbarch);
11892 struct regcache *reg_cache = arm_insn_r->regcache;
11893
11894 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11895 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11896 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11897 op = bit (arm_insn_r->arm_insn, 4);
11898 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11899
11900 /* Handle arm SWI/SVC system call instructions. */
11901 if (bits_24_25 == 0x3)
11902 {
11903 if (tdep->arm_syscall_record != NULL)
11904 {
11905 ULONGEST svc_operand, svc_number;
11906
11907 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11908
11909 if (svc_operand) /* OABI. */
11910 svc_number = svc_operand - 0x900000;
11911 else /* EABI. */
11912 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
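/* An OABI SWI encodes the syscall number in the insn itself, offset by
   0x900000; for instance (example encoding) "swi 0x900004" yields syscall
   number 4.  EABI uses "swi 0" and passes the number in r7.  */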
11913
11914 return tdep->arm_syscall_record (reg_cache, svc_number);
11915 }
11916 else
11917 {
11918 printf_unfiltered (_("no syscall record support\n"));
11919 return -1;
11920 }
11921 }
11922 else if (bits_24_25 == 0x02)
11923 {
11924 if (op)
11925 {
11926 if ((coproc & 0x0e) == 0x0a)
11927 {
11928 /* 8, 16, and 32-bit transfer */
11929 return arm_record_vdata_transfer_insn (arm_insn_r);
11930 }
11931 else
11932 {
11933 if (op1_ebit)
11934 {
11935 /* MRC, MRC2 */
11936 uint32_t record_buf[1];
11937
11938 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11939 if (record_buf[0] == 15)
11940 record_buf[0] = ARM_PS_REGNUM;
11941
11942 arm_insn_r->reg_rec_count = 1;
11943 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11944 record_buf);
11945 return 0;
11946 }
11947 else
11948 {
11949 /* MCR, MCR2 */
11950 return -1;
11951 }
11952 }
11953 }
11954 else
11955 {
11956 if ((coproc & 0x0e) == 0x0a)
11957 {
11958 /* VFP data-processing instructions. */
11959 return arm_record_vfp_data_proc_insn (arm_insn_r);
11960 }
11961 else
11962 {
11963 /* CDP, CDP2 */
11964 return -1;
11965 }
11966 }
11967 }
11968 else
11969 {
11970 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11971
11972 if (op1 == 5)
11973 {
11974 if ((coproc & 0x0e) != 0x0a)
11975 {
11976 /* MRRC, MRRC2 */
11977 return -1;
11978 }
11979 }
11980 else if (op1 == 4 || op1 == 5)
11981 {
11982 if ((coproc & 0x0e) == 0x0a)
11983 {
11984 /* 64-bit transfers between ARM core and extension */
11985 return -1;
11986 }
11987 else if (op1 == 4)
11988 {
11989 /* MCRR, MCRR2 */
11990 return -1;
11991 }
11992 }
11993 else if (op1 == 0 || op1 == 1)
11994 {
11995 /* UNDEFINED */
11996 return -1;
11997 }
11998 else
11999 {
12000 if ((coproc & 0x0e) == 0x0a)
12001 {
12002 /* Extension register load/store */
12003 }
12004 else
12005 {
12006 /* STC, STC2, LDC, LDC2 */
12007 }
12008 return -1;
12009 }
12010 }
12011
12012 return -1;
12013 }
12014
12015 /* Handling opcode 000 insns. */
12016
12017 static int
12018 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12019 {
12020 uint32_t record_buf[8];
12021 uint32_t reg_src1 = 0;
12022
12023 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12024
12025 record_buf[0] = ARM_PS_REGNUM;
12026 record_buf[1] = reg_src1;
12027 thumb_insn_r->reg_rec_count = 2;
12028
12029 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12030
12031 return 0;
12032 }
12033
12034
12035 /* Handling opcode 001 insns. */
12036
12037 static int
12038 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12039 {
12040 uint32_t record_buf[8];
12041 uint32_t reg_src1 = 0;
12042
12043 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12044
12045 record_buf[0] = ARM_PS_REGNUM;
12046 record_buf[1] = reg_src1;
12047 thumb_insn_r->reg_rec_count = 2;
12048
12049 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12050
12051 return 0;
12052 }
12053
12054 /* Handling opcode 010 insns. */
12055
12056 static int
12057 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12058 {
12059 struct regcache *reg_cache = thumb_insn_r->regcache;
12060 uint32_t record_buf[8], record_buf_mem[8];
12061
12062 uint32_t reg_src1 = 0, reg_src2 = 0;
12063 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12064
12065 ULONGEST u_regval[2] = {0};
12066
12067 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12068
12069 if (bit (thumb_insn_r->arm_insn, 12))
12070 {
12071 /* Handle load/store register offset. */
12072 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
12073
12074 if (in_inclusive_range (opB, 4U, 7U))
12075 {
12076 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH.  */
12077 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12078 record_buf[0] = reg_src1;
12079 thumb_insn_r->reg_rec_count = 1;
12080 }
12081 else if (in_inclusive_range (opB, 0U, 2U))
12082 {
12083 /* STR(2), STRB(2), STRH(2).  */
12084 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12085 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12086 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12087 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12088 if (0 == opB)
12089 record_buf_mem[0] = 4; /* STR (2). */
12090 else if (2 == opB)
12091 record_buf_mem[0] = 1; /* STRB (2). */
12092 else if (1 == opB)
12093 record_buf_mem[0] = 2; /* STRH (2). */
12094 record_buf_mem[1] = u_regval[0] + u_regval[1];
12095 thumb_insn_r->mem_rec_count = 1;
12096 }
12097 }
12098 else if (bit (thumb_insn_r->arm_insn, 11))
12099 {
12100 /* Handle load from literal pool. */
12101 /* LDR(3). */
12102 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12103 record_buf[0] = reg_src1;
12104 thumb_insn_r->reg_rec_count = 1;
12105 }
12106 else if (opcode1)
12107 {
12108 /* Special data instructions and branch and exchange */
12109 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12110 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12111 if ((3 == opcode2) && (!opcode3))
12112 {
12113 /* Branch with exchange. */
12114 record_buf[0] = ARM_PS_REGNUM;
12115 thumb_insn_r->reg_rec_count = 1;
12116 }
12117 else
12118 {
12119 /* Format 8; special data processing insns. */
12120 record_buf[0] = ARM_PS_REGNUM;
12121 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
12122 | bits (thumb_insn_r->arm_insn, 0, 2));
12123 thumb_insn_r->reg_rec_count = 2;
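/* The destination of these insns may be a high register: bit 7 (H1)
   supplies bit 3 of the register number, e.g. H1 = 1 with Rd bits 0-2
   equal to 0b010 selects r10.  */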
12124 }
12125 }
12126 else
12127 {
12128 /* Format 5; data processing insns. */
12129 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12130 if (bit (thumb_insn_r->arm_insn, 7))
12131 {
12132 reg_src1 = reg_src1 + 8;
12133 }
12134 record_buf[0] = ARM_PS_REGNUM;
12135 record_buf[1] = reg_src1;
12136 thumb_insn_r->reg_rec_count = 2;
12137 }
12138
12139 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12140 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12141 record_buf_mem);
12142
12143 return 0;
12144 }
12145
12146 /* Handling opcode 001 insns. */
12147
12148 static int
12149 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12150 {
12151 struct regcache *reg_cache = thumb_insn_r->regcache;
12152 uint32_t record_buf[8], record_buf_mem[8];
12153
12154 uint32_t reg_src1 = 0;
12155 uint32_t opcode = 0, immed_5 = 0;
12156
12157 ULONGEST u_regval = 0;
12158
12159 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12160
12161 if (opcode)
12162 {
12163 /* LDR(1). */
12164 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12165 record_buf[0] = reg_src1;
12166 thumb_insn_r->reg_rec_count = 1;
12167 }
12168 else
12169 {
12170 /* STR(1). */
12171 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12172 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12173 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12174 record_buf_mem[0] = 4;
12175 record_buf_mem[1] = u_regval + (immed_5 * 4);
12176 thumb_insn_r->mem_rec_count = 1;
12177 }
12178
12179 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12180 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12181 record_buf_mem);
12182
12183 return 0;
12184 }
12185
12186 /* Handling opcode 100 insns. */
12187
12188 static int
12189 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12190 {
12191 struct regcache *reg_cache = thumb_insn_r->regcache;
12192 uint32_t record_buf[8], record_buf_mem[8];
12193
12194 uint32_t reg_src1 = 0;
12195 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12196
12197 ULONGEST u_regval = 0;
12198
12199 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12200
12201 if (3 == opcode)
12202 {
12203 /* LDR(4). */
12204 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12205 record_buf[0] = reg_src1;
12206 thumb_insn_r->reg_rec_count = 1;
12207 }
12208 else if (1 == opcode)
12209 {
12210 /* LDRH(1). */
12211 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12212 record_buf[0] = reg_src1;
12213 thumb_insn_r->reg_rec_count = 1;
12214 }
12215 else if (2 == opcode)
12216 {
12217 /* STR(3). */
12218 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12219 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12220 record_buf_mem[0] = 4;
12221 record_buf_mem[1] = u_regval + (immed_8 * 4);
12222 thumb_insn_r->mem_rec_count = 1;
12223 }
12224 else if (0 == opcode)
12225 {
12226 /* STRH(1). */
12227 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12228 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12229 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12230 record_buf_mem[0] = 2;
12231 record_buf_mem[1] = u_regval + (immed_5 * 2);
12232 thumb_insn_r->mem_rec_count = 1;
12233 }
12234
12235 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12236 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12237 record_buf_mem);
12238
12239 return 0;
12240 }
12241
12242 /* Handling opcode 101 insns. */
12243
12244 static int
12245 thumb_record_misc (insn_decode_record *thumb_insn_r)
12246 {
12247 struct regcache *reg_cache = thumb_insn_r->regcache;
12248
12249 uint32_t opcode = 0;
12250 uint32_t register_bits = 0, register_count = 0;
12251 uint32_t index = 0, start_address = 0;
12252 uint32_t record_buf[24], record_buf_mem[48];
12253 uint32_t reg_src1;
12254
12255 ULONGEST u_regval = 0;
12256
12257 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12258
12259 if (opcode == 0 || opcode == 1)
12260 {
12261 /* ADR and ADD (SP plus immediate) */
12262
12263 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12264 record_buf[0] = reg_src1;
12265 thumb_insn_r->reg_rec_count = 1;
12266 }
12267 else
12268 {
12269 /* Miscellaneous 16-bit instructions */
12270 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12271
12272 switch (opcode2)
12273 {
12274 case 6:
12275 /* SETEND and CPS */
12276 break;
12277 case 0:
12278 /* ADD/SUB (SP plus immediate) */
12279 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12280 record_buf[0] = ARM_SP_REGNUM;
12281 thumb_insn_r->reg_rec_count = 1;
12282 break;
12283 case 1: /* fall through */
12284 case 3: /* fall through */
12285 case 9: /* fall through */
12286 case 11:
12287 /* CBNZ, CBZ */
12288 break;
12289 case 2:
12290 /* SXTH, SXTB, UXTH, UXTB */
12291 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12292 thumb_insn_r->reg_rec_count = 1;
12293 break;
12294 case 4: /* fall through */
12295 case 5:
12296 /* PUSH. */
12297 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12298 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12299 while (register_bits)
12300 {
12301 if (register_bits & 0x00000001)
12302 register_count++;
12303 register_bits = register_bits >> 1;
12304 }
12305 start_address = u_regval - \
12306 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12307 thumb_insn_r->mem_rec_count = register_count;
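/* For example (hypothetical values), "push {r4-r6}" with SP = 0x3000 has
   register_count = 3 and no LR bit, so start_address = 0x3000 - 12 =
   0x2FF4 and three four-byte slots from there are recorded; SP itself is
   recorded as a modified register below.  */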
12308 while (register_count)
12309 {
12310 record_buf_mem[(register_count * 2) - 1] = start_address;
12311 record_buf_mem[(register_count * 2) - 2] = 4;
12312 start_address = start_address + 4;
12313 register_count--;
12314 }
12315 record_buf[0] = ARM_SP_REGNUM;
12316 thumb_insn_r->reg_rec_count = 1;
12317 break;
12318 case 10:
12319 /* REV, REV16, REVSH */
12320 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12321 thumb_insn_r->reg_rec_count = 1;
12322 break;
12323 case 12: /* fall through */
12324 case 13:
12325 /* POP. */
12326 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12327 while (register_bits)
12328 {
12329 if (register_bits & 0x00000001)
12330 record_buf[index++] = register_count;
12331 register_bits = register_bits >> 1;
12332 register_count++;
12333 }
12334 record_buf[index++] = ARM_PS_REGNUM;
12335 record_buf[index++] = ARM_SP_REGNUM;
12336 thumb_insn_r->reg_rec_count = index;
12337 break;
12338 case 0xe:
12339 /* BKPT insn. */
12340 /* Handle enhanced software breakpoint insn, BKPT. */
12341 /* CPSR is changed so that execution continues in ARM state, with normal
12342 interrupts disabled, entering abort mode.  */
12343 /* The PC is set according to the high vector configuration.  */
12344 /* If the user hits a breakpoint and then runs in reverse, we need to go
12345 back with the previous CPSR and program counter.  */
12346 record_buf[0] = ARM_PS_REGNUM;
12347 record_buf[1] = ARM_LR_REGNUM;
12348 thumb_insn_r->reg_rec_count = 2;
12349 /* We need to save SPSR value, which is not yet done. */
12350 printf_unfiltered (_("Process record does not support instruction "
12351 "0x%0x at address %s.\n"),
12352 thumb_insn_r->arm_insn,
12353 paddress (thumb_insn_r->gdbarch,
12354 thumb_insn_r->this_addr));
12355 return -1;
12356
12357 case 0xf:
12358 /* If-Then, and hints */
12359 break;
12360 default:
12361 return -1;
12362 };
12363 }
12364
12365 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12366 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12367 record_buf_mem);
12368
12369 return 0;
12370 }
12371
12372 /* Handling opcode 110 insns. */
12373
12374 static int
12375 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12376 {
12377 arm_gdbarch_tdep *tdep
12378 = (arm_gdbarch_tdep *) gdbarch_tdep (thumb_insn_r->gdbarch);
12379 struct regcache *reg_cache = thumb_insn_r->regcache;
12380
12381 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success.  */
12382 uint32_t reg_src1 = 0;
12383 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12384 uint32_t index = 0, start_address = 0;
12385 uint32_t record_buf[24], record_buf_mem[48];
12386
12387 ULONGEST u_regval = 0;
12388
12389 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12390 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12391
12392 if (1 == opcode2)
12393 {
12394
12395 /* LDMIA. */
12396 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12397 /* Get Rn. */
12398 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12399 while (register_bits)
12400 {
12401 if (register_bits & 0x00000001)
12402 record_buf[index++] = register_count;
12403 register_bits = register_bits >> 1;
12404 register_count++;
12405 }
12406 record_buf[index++] = reg_src1;
12407 thumb_insn_r->reg_rec_count = index;
12408 }
12409 else if (0 == opcode2)
12410 {
12411 /* Handle STMIA.  */
12412 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12413 /* Get Rn. */
12414 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12415 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12416 while (register_bits)
12417 {
12418 if (register_bits & 0x00000001)
12419 register_count++;
12420 register_bits = register_bits >> 1;
12421 }
12422 start_address = u_regval;
12423 thumb_insn_r->mem_rec_count = register_count;
12424 while (register_count)
12425 {
12426 record_buf_mem[(register_count * 2) - 1] = start_address;
12427 record_buf_mem[(register_count * 2) - 2] = 4;
12428 start_address = start_address + 4;
12429 register_count--;
12430 }
12431 }
12432 else if (0x1F == opcode1)
12433 {
12434 /* Handle arm syscall insn. */
12435 if (tdep->arm_syscall_record != NULL)
12436 {
12437 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12438 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12439 }
12440 else
12441 {
12442 printf_unfiltered (_("no syscall record support\n"));
12443 return -1;
12444 }
12445 }
12446
12447 /* B (1), the conditional branch, is automatically taken care of in
12448 process_record, as PC is saved there.  */
12449
12450 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12451 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12452 record_buf_mem);
12453
12454 return ret;
12455 }
12456
12457 /* Handling opcode 111 insns. */
12458
12459 static int
12460 thumb_record_branch (insn_decode_record *thumb_insn_r)
12461 {
12462 uint32_t record_buf[8];
12463 uint32_t bits_h = 0;
12464
12465 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12466
12467 if (2 == bits_h || 3 == bits_h)
12468 {
12469 /* BL */
12470 record_buf[0] = ARM_LR_REGNUM;
12471 thumb_insn_r->reg_rec_count = 1;
12472 }
12473 else if (1 == bits_h)
12474 {
12475 /* BLX(1). */
12476 record_buf[0] = ARM_PS_REGNUM;
12477 record_buf[1] = ARM_LR_REGNUM;
12478 thumb_insn_r->reg_rec_count = 2;
12479 }
12480
12481 /* B(2) is automatically taken care of in process_record, as PC is
12482 saved there.  */
12483
12484 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12485
12486 return 0;
12487 }
12488
12489 /* Handler for thumb2 load/store multiple instructions. */
12490
12491 static int
12492 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12493 {
12494 struct regcache *reg_cache = thumb2_insn_r->regcache;
12495
12496 uint32_t reg_rn, op;
12497 uint32_t register_bits = 0, register_count = 0;
12498 uint32_t index = 0, start_address = 0;
12499 uint32_t record_buf[24], record_buf_mem[48];
12500
12501 ULONGEST u_regval = 0;
12502
12503 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12504 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12505
12506 if (0 == op || 3 == op)
12507 {
12508 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12509 {
12510 /* Handle RFE instruction. */
12511 record_buf[0] = ARM_PS_REGNUM;
12512 thumb2_insn_r->reg_rec_count = 1;
12513 }
12514 else
12515 {
12516 /* Handle SRS instruction after reading banked SP. */
12517 return arm_record_unsupported_insn (thumb2_insn_r);
12518 }
12519 }
12520 else if (1 == op || 2 == op)
12521 {
12522 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12523 {
12524 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12525 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12526 while (register_bits)
12527 {
12528 if (register_bits & 0x00000001)
12529 record_buf[index++] = register_count;
12530
12531 register_count++;
12532 register_bits = register_bits >> 1;
12533 }
12534 record_buf[index++] = reg_rn;
12535 record_buf[index++] = ARM_PS_REGNUM;
12536 thumb2_insn_r->reg_rec_count = index;
12537 }
12538 else
12539 {
12540 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12541 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12542 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12543 while (register_bits)
12544 {
12545 if (register_bits & 0x00000001)
12546 register_count++;
12547
12548 register_bits = register_bits >> 1;
12549 }
12550
12551 if (1 == op)
12552 {
12553 /* Start address calculation for STM/STMIA/STMEA.  */
12554 start_address = u_regval;
12555 }
12556 else if (2 == op)
12557 {
12558 /* Start address calculation for STMDB/STMFD.  */
12559 start_address = u_regval - register_count * 4;
12560 }
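/* For example (hypothetical values), "stmdb r0!, {r1-r4}" has op == 2 and
   register_count = 4, so with r0 = 0x8000 the 16 bytes starting at
   0x8000 - 16 = 0x7FF0 are recorded.  */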
12561
12562 thumb2_insn_r->mem_rec_count = register_count;
12563 while (register_count)
12564 {
12565 record_buf_mem[register_count * 2 - 1] = start_address;
12566 record_buf_mem[register_count * 2 - 2] = 4;
12567 start_address = start_address + 4;
12568 register_count--;
12569 }
12570 record_buf[0] = reg_rn;
12571 record_buf[1] = ARM_PS_REGNUM;
12572 thumb2_insn_r->reg_rec_count = 2;
12573 }
12574 }
12575
12576 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12577 record_buf_mem);
12578 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12579 record_buf);
12580 return ARM_RECORD_SUCCESS;
12581 }
12582
12583 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12584 instructions. */
12585
12586 static int
12587 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12588 {
12589 struct regcache *reg_cache = thumb2_insn_r->regcache;
12590
12591 uint32_t reg_rd, reg_rn, offset_imm;
12592 uint32_t reg_dest1, reg_dest2;
12593 uint32_t address, offset_addr;
12594 uint32_t record_buf[8], record_buf_mem[8];
12595 uint32_t op1, op2, op3;
12596
12597 ULONGEST u_regval[2];
12598
12599 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12600 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12601 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12602
12603 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12604 {
12605 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12606 {
12607 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12608 record_buf[0] = reg_dest1;
12609 record_buf[1] = ARM_PS_REGNUM;
12610 thumb2_insn_r->reg_rec_count = 2;
12611 }
12612
12613 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12614 {
12615 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12616 record_buf[2] = reg_dest2;
12617 thumb2_insn_r->reg_rec_count = 3;
12618 }
12619 }
12620 else
12621 {
12622 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12623 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12624
12625 if (0 == op1 && 0 == op2)
12626 {
12627 /* Handle STREX. */
12628 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12629 address = u_regval[0] + (offset_imm * 4);
12630 record_buf_mem[0] = 4;
12631 record_buf_mem[1] = address;
12632 thumb2_insn_r->mem_rec_count = 1;
12633 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12634 record_buf[0] = reg_rd;
12635 thumb2_insn_r->reg_rec_count = 1;
12636 }
12637 else if (1 == op1 && 0 == op2)
12638 {
12639 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12640 record_buf[0] = reg_rd;
12641 thumb2_insn_r->reg_rec_count = 1;
12642 address = u_regval[0];
12643 record_buf_mem[1] = address;
12644
12645 if (4 == op3)
12646 {
12647 /* Handle STREXB. */
12648 record_buf_mem[0] = 1;
12649 thumb2_insn_r->mem_rec_count = 1;
12650 }
12651 else if (5 == op3)
12652 {
12653 /* Handle STREXH. */
12654 record_buf_mem[0] = 2;
12655 thumb2_insn_r->mem_rec_count = 1;
12656 }
12657 else if (7 == op3)
12658 {
12659 /* Handle STREXD. */
12660 address = u_regval[0];
12661 record_buf_mem[0] = 4;
12662 record_buf_mem[2] = 4;
12663 record_buf_mem[3] = address + 4;
12664 thumb2_insn_r->mem_rec_count = 2;
12665 }
12666 }
12667 else
12668 {
12669 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12670
12671 if (bit (thumb2_insn_r->arm_insn, 24))
12672 {
12673 if (bit (thumb2_insn_r->arm_insn, 23))
12674 offset_addr = u_regval[0] + (offset_imm * 4);
12675 else
12676 offset_addr = u_regval[0] - (offset_imm * 4);
12677
12678 address = offset_addr;
12679 }
12680 else
12681 address = u_regval[0];
12682
12683 record_buf_mem[0] = 4;
12684 record_buf_mem[1] = address;
12685 record_buf_mem[2] = 4;
12686 record_buf_mem[3] = address + 4;
12687 thumb2_insn_r->mem_rec_count = 2;
12688 record_buf[0] = reg_rn;
12689 thumb2_insn_r->reg_rec_count = 1;
12690 }
12691 }
12692
12693 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12694 record_buf);
12695 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12696 record_buf_mem);
12697 return ARM_RECORD_SUCCESS;
12698 }
12699
12700 /* Handler for thumb2 data processing (shifted register and modified
12701 immediate) instructions. */
12702
12703 static int
12704 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12705 {
12706 uint32_t reg_rd, op;
12707 uint32_t record_buf[8];
12708
12709 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12710 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12711
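/* OP values 0, 4, 8 and 13 with Rd == 0xf are the flag-setting TST, TEQ,
   CMN and CMP forms, which have no destination register, so only CPSR is
   recorded for them.  */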
12712 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12713 {
12714 record_buf[0] = ARM_PS_REGNUM;
12715 thumb2_insn_r->reg_rec_count = 1;
12716 }
12717 else
12718 {
12719 record_buf[0] = reg_rd;
12720 record_buf[1] = ARM_PS_REGNUM;
12721 thumb2_insn_r->reg_rec_count = 2;
12722 }
12723
12724 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12725 record_buf);
12726 return ARM_RECORD_SUCCESS;
12727 }
12728
12729 /* Generic handler for thumb2 instructions which affect the destination
12730 and PS registers. */
12731
12732 static int
12733 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12734 {
12735 uint32_t reg_rd;
12736 uint32_t record_buf[8];
12737
12738 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12739
12740 record_buf[0] = reg_rd;
12741 record_buf[1] = ARM_PS_REGNUM;
12742 thumb2_insn_r->reg_rec_count = 2;
12743
12744 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12745 record_buf);
12746 return ARM_RECORD_SUCCESS;
12747 }
12748
12749 /* Handler for thumb2 branch and miscellaneous control instructions. */
12750
12751 static int
12752 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12753 {
12754 uint32_t op, op1, op2;
12755 uint32_t record_buf[8];
12756
12757 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12758 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12759 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12760
12761 /* Handle MSR insn. */
12762 if (!(op1 & 0x2) && 0x38 == op)
12763 {
12764 if (!(op2 & 0x3))
12765 {
12766 /* CPSR is going to be changed. */
12767 record_buf[0] = ARM_PS_REGNUM;
12768 thumb2_insn_r->reg_rec_count = 1;
12769 }
12770 else
12771 {
12772 arm_record_unsupported_insn (thumb2_insn_r);
12773 return -1;
12774 }
12775 }
12776 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12777 {
12778 /* BL and BLX; record LR and, conservatively, CPSR. */
12779 record_buf[0] = ARM_PS_REGNUM;
12780 record_buf[1] = ARM_LR_REGNUM;
12781 thumb2_insn_r->reg_rec_count = 2;
12782 }
12783
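/* Any other encoding in this space records no extra registers here;
   the PC itself is always added by arm_process_record.  */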
12784 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12785 record_buf);
12786 return ARM_RECORD_SUCCESS;
12787 }
12788
12789 /* Handler for thumb2 store single data item instructions. */
12790
12791 static int
12792 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12793 {
12794 struct regcache *reg_cache = thumb2_insn_r->regcache;
12795
12796 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12797 uint32_t address, offset_addr;
12798 uint32_t record_buf[8], record_buf_mem[8];
12799 uint32_t op1, op2;
12800
12801 ULONGEST u_regval[2];
12802
12803 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12804 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12805 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12806 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12807
12808 if (bit (thumb2_insn_r->arm_insn, 23))
12809 {
12810 /* T2 encoding. */
12811 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12812 offset_addr = u_regval[0] + offset_imm;
12813 address = offset_addr;
12814 }
12815 else
12816 {
12817 /* T3 encoding. */
12818 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12819 {
12820 /* Handle STRB (register). */
12821 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12822 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12823 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12824 offset_addr = u_regval[1] << shift_imm;
12825 address = u_regval[0] + offset_addr;
12826 }
12827 else
12828 {
12829 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12830 if (bit (thumb2_insn_r->arm_insn, 10))
12831 {
12832 if (bit (thumb2_insn_r->arm_insn, 9))
12833 offset_addr = u_regval[0] + offset_imm;
12834 else
12835 offset_addr = u_regval[0] - offset_imm;
12836
12837 address = offset_addr;
12838 }
12839 else
12840 address = u_regval[0];
12841 }
12842 }
12843
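/* op1 selects the access size of the store; record the overwritten
   memory and the base register (which may be written back).  */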
12844 switch (op1)
12845 {
12846 /* Store byte instructions. */
12847 case 4:
12848 case 0:
12849 record_buf_mem[0] = 1;
12850 break;
12851 /* Store half word instructions. */
12852 case 1:
12853 case 5:
12854 record_buf_mem[0] = 2;
12855 break;
12856 /* Store word instructions. */
12857 case 2:
12858 case 6:
12859 record_buf_mem[0] = 4;
12860 break;
12861
12862 default:
12863 gdb_assert_not_reached ("no decoding pattern found");
12864 break;
12865 }
12866
12867 record_buf_mem[1] = address;
12868 thumb2_insn_r->mem_rec_count = 1;
12869 record_buf[0] = reg_rn;
12870 thumb2_insn_r->reg_rec_count = 1;
12871
12872 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12873 record_buf);
12874 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12875 record_buf_mem);
12876 return ARM_RECORD_SUCCESS;
12877 }
12878
12879 /* Handler for thumb2 load byte/halfword and memory hint instructions. */
12880
12881 static int
12882 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12883 {
12884 uint32_t record_buf[8];
12885 uint32_t reg_rt, reg_rn;
12886
12887 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12888 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12889
12890 if (ARM_PC_REGNUM != reg_rt)
12891 {
12892 record_buf[0] = reg_rt;
12893 record_buf[1] = reg_rn;
12894 record_buf[2] = ARM_PS_REGNUM;
12895 thumb2_insn_r->reg_rec_count = 3;
12896
12897 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12898 record_buf);
12899 return ARM_RECORD_SUCCESS;
12900 }
12901
12902 return ARM_RECORD_FAILURE;
12903 }
12904
12905 /* Handler for thumb2 load word instructions. */
12906
12907 static int
12908 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12909 {
12910 uint32_t record_buf[8];
12911
12912 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12913 record_buf[1] = ARM_PS_REGNUM;
12914 thumb2_insn_r->reg_rec_count = 2;
12915
12916 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12917 record_buf);
12918 return ARM_RECORD_SUCCESS;
12919 }
12920
12921 /* Handler for thumb2 long multiply, long multiply accumulate, and
12922 divide instructions. */
12923
12924 static int
12925 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12926 {
12927 uint32_t opcode1 = 0, opcode2 = 0;
12928 uint32_t record_buf[8];
12929
12930 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12931 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12932
12933 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12934 {
12935 /* Handle SMULL(S), UMULL(S), SMLAL(S) and UMLAL(S). */
12937 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12938 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12939 record_buf[2] = ARM_PS_REGNUM;
12940 thumb2_insn_r->reg_rec_count = 3;
12941 }
12942 else if (1 == opcode1 || 3 == opcode1)
12943 {
12944 /* Handle SDIV and UDIV. */
12945 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12946 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12947 record_buf[2] = ARM_PS_REGNUM;
12948 thumb2_insn_r->reg_rec_count = 3;
12949 }
12950 else
12951 return ARM_RECORD_FAILURE;
12952
12953 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12954 record_buf);
12955 return ARM_RECORD_SUCCESS;
12956 }
12957
12958 /* Record handler for thumb32 coprocessor instructions. */
12959
12960 static int
12961 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12962 {
12963 if (bit (thumb2_insn_r->arm_insn, 25))
12964 return arm_record_coproc_data_proc (thumb2_insn_r);
12965 else
12966 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12967 }
12968
12969 /* Record handler for Advanced SIMD structure load/store instructions. */
12970
12971 static int
12972 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12973 {
12974 struct regcache *reg_cache = thumb2_insn_r->regcache;
12975 uint32_t l_bit, a_bit, b_bits;
12976 uint32_t record_buf[128], record_buf_mem[128];
12977 uint32_t reg_rn, reg_vd, address, f_elem;
12978 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12979 uint8_t f_ebytes;
12980
12981 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12982 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12983 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12984 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12985 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12986 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12987 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12988 f_elem = 8 / f_ebytes;
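/* f_ebytes is the element size in bytes taken from the size field;
   f_elem is the number of such elements in one 64-bit D register.  */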
12989
12990 if (!l_bit)
12991 {
12992 ULONGEST u_regval = 0;
12993 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12994 address = u_regval;
12995
12996 if (!a_bit)
12997 {
12998 /* Handle VST1. */
12999 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13000 {
13001 if (b_bits == 0x07)
13002 bf_regs = 1;
13003 else if (b_bits == 0x0a)
13004 bf_regs = 2;
13005 else if (b_bits == 0x06)
13006 bf_regs = 3;
13007 else if (b_bits == 0x02)
13008 bf_regs = 4;
13009 else
13010 bf_regs = 0;
13011
13012 for (index_r = 0; index_r < bf_regs; index_r++)
13013 {
13014 for (index_e = 0; index_e < f_elem; index_e++)
13015 {
13016 record_buf_mem[index_m++] = f_ebytes;
13017 record_buf_mem[index_m++] = address;
13018 address = address + f_ebytes;
13019 thumb2_insn_r->mem_rec_count += 1;
13020 }
13021 }
13022 }
13023 /* Handle VST2. */
13024 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13025 {
13026 if (b_bits == 0x09 || b_bits == 0x08)
13027 bf_regs = 1;
13028 else if (b_bits == 0x03)
13029 bf_regs = 2;
13030 else
13031 bf_regs = 0;
13032
13033 for (index_r = 0; index_r < bf_regs; index_r++)
13034 for (index_e = 0; index_e < f_elem; index_e++)
13035 {
13036 for (loop_t = 0; loop_t < 2; loop_t++)
13037 {
13038 record_buf_mem[index_m++] = f_ebytes;
13039 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13040 thumb2_insn_r->mem_rec_count += 1;
13041 }
13042 address = address + (2 * f_ebytes);
13043 }
13044 }
13045 /* Handle VST3. */
13046 else if ((b_bits & 0x0e) == 0x04)
13047 {
13048 for (index_e = 0; index_e < f_elem; index_e++)
13049 {
13050 for (loop_t = 0; loop_t < 3; loop_t++)
13051 {
13052 record_buf_mem[index_m++] = f_ebytes;
13053 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13054 thumb2_insn_r->mem_rec_count += 1;
13055 }
13056 address = address + (3 * f_ebytes);
13057 }
13058 }
13059 /* Handle VST4. */
13060 else if (!(b_bits & 0x0e))
13061 {
13062 for (index_e = 0; index_e < f_elem; index_e++)
13063 {
13064 for (loop_t = 0; loop_t < 4; loop_t++)
13065 {
13066 record_buf_mem[index_m++] = f_ebytes;
13067 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13068 thumb2_insn_r->mem_rec_count += 1;
13069 }
13070 address = address + (4 * f_ebytes);
13071 }
13072 }
13073 }
13074 else
13075 {
13076 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13077
13078 if (bft_size == 0x00)
13079 f_ebytes = 1;
13080 else if (bft_size == 0x01)
13081 f_ebytes = 2;
13082 else if (bft_size == 0x02)
13083 f_ebytes = 4;
13084 else
13085 f_ebytes = 0;
13086
13087 /* Handle VST1. */
13088 if (!(b_bits & 0x0b) || b_bits == 0x08)
13089 thumb2_insn_r->mem_rec_count = 1;
13090 /* Handle VST2. */
13091 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13092 thumb2_insn_r->mem_rec_count = 2;
13093 /* Handle VST3. */
13094 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13095 thumb2_insn_r->mem_rec_count = 3;
13096 /* Handle VST4. */
13097 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13098 thumb2_insn_r->mem_rec_count = 4;
13099
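/* Single element (to one lane) stores: record one (length, address)
   pair for each element of the structure written to memory.  */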
13100 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13101 {
13102 record_buf_mem[index_m * 2] = f_ebytes;
13103 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
13104 }
13105 }
13106 }
13107 else
13108 {
13109 if (!a_bit)
13110 {
13111 /* Handle VLD1. */
13112 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13113 thumb2_insn_r->reg_rec_count = 1;
13114 /* Handle VLD2. */
13115 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13116 thumb2_insn_r->reg_rec_count = 2;
13117 /* Handle VLD3. */
13118 else if ((b_bits & 0x0e) == 0x04)
13119 thumb2_insn_r->reg_rec_count = 3;
13120 /* Handle VLD4. */
13121 else if (!(b_bits & 0x0e))
13122 thumb2_insn_r->reg_rec_count = 4;
13123 }
13124 else
13125 {
13126 /* Handle VLD1. */
13127 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13128 thumb2_insn_r->reg_rec_count = 1;
13129 /* Handle VLD2. */
13130 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13131 thumb2_insn_r->reg_rec_count = 2;
13132 /* Handle VLD3. */
13133 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13134 thumb2_insn_r->reg_rec_count = 3;
13135 /* Handle VLD4. */
13136 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13137 thumb2_insn_r->reg_rec_count = 4;
13138
13139 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13140 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13141 }
13142 }
13143
13144 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13145 {
13146 record_buf[index_r] = reg_rn;
13147 thumb2_insn_r->reg_rec_count += 1;
13148 }
13149
13150 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13151 record_buf);
13152 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13153 record_buf_mem);
13154 return 0;
13155 }
13156
13157 /* Decodes thumb2 instruction type and invokes its record handler. */
13158
13159 static unsigned int
13160 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13161 {
13162 uint32_t op, op1, op2;
13163
13164 op = bit (thumb2_insn_r->arm_insn, 15);
13165 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13166 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13167
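/* op1 (bits 27-28) selects the major group of the 32-bit Thumb encoding;
   op2 (bits 20-26) and op (bit 15) pick the handler within that group.  */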
13168 if (op1 == 0x01)
13169 {
13170 if (!(op2 & 0x64))
13171 {
13172 /* Load/store multiple instruction. */
13173 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13174 }
13175 else if ((op2 & 0x64) == 0x4)
13176 {
13177 /* Load/store (dual/exclusive) and table branch instruction. */
13178 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13179 }
13180 else if ((op2 & 0x60) == 0x20)
13181 {
13182 /* Data-processing (shifted register). */
13183 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13184 }
13185 else if (op2 & 0x40)
13186 {
13187 /* Co-processor instructions. */
13188 return thumb2_record_coproc_insn (thumb2_insn_r);
13189 }
13190 }
13191 else if (op1 == 0x02)
13192 {
13193 if (op)
13194 {
13195 /* Branches and miscellaneous control instructions. */
13196 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13197 }
13198 else if (op2 & 0x20)
13199 {
13200 /* Data-processing (plain binary immediate) instruction. */
13201 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13202 }
13203 else
13204 {
13205 /* Data-processing (modified immediate). */
13206 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13207 }
13208 }
13209 else if (op1 == 0x03)
13210 {
13211 if (!(op2 & 0x71))
13212 {
13213 /* Store single data item. */
13214 return thumb2_record_str_single_data (thumb2_insn_r);
13215 }
13216 else if (!((op2 & 0x71) ^ 0x10))
13217 {
13218 /* Advanced SIMD or structure load/store instructions. */
13219 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13220 }
13221 else if (!((op2 & 0x67) ^ 0x01))
13222 {
13223 /* Load byte, memory hints instruction. */
13224 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13225 }
13226 else if (!((op2 & 0x67) ^ 0x03))
13227 {
13228 /* Load halfword, memory hints instruction. */
13229 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13230 }
13231 else if (!((op2 & 0x67) ^ 0x05))
13232 {
13233 /* Load word instruction. */
13234 return thumb2_record_ld_word (thumb2_insn_r);
13235 }
13236 else if (!((op2 & 0x70) ^ 0x20))
13237 {
13238 /* Data-processing (register) instruction. */
13239 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13240 }
13241 else if (!((op2 & 0x78) ^ 0x30))
13242 {
13243 /* Multiply, multiply accumulate, abs diff instruction. */
13244 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13245 }
13246 else if (!((op2 & 0x78) ^ 0x38))
13247 {
13248 /* Long multiply, long multiply accumulate, and divide. */
13249 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13250 }
13251 else if (op2 & 0x40)
13252 {
13253 /* Co-processor instructions. */
13254 return thumb2_record_coproc_insn (thumb2_insn_r);
13255 }
13256 }
13257
13258 return -1;
13259 }
13260
13261 namespace {
13262 /* Abstract memory reader. */
13263
13264 class abstract_memory_reader
13265 {
13266 public:
13267 /* Read LEN bytes of target memory at address MEMADDR, placing the
13268 results in GDB's memory at BUF. Return true on success. */
13269
13270 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13271 };
13272
13273 /* Instruction reader from real target. */
13274
13275 class instruction_reader : public abstract_memory_reader
13276 {
13277 public:
13278 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13279 {
13280 if (target_read_memory (memaddr, buf, len))
13281 return false;
13282 else
13283 return true;
13284 }
13285 };
13286
13287 } // namespace
13288
13289 /* Extract an arm/thumb/thumb2 instruction of INSN_SIZE bytes.  Return 0
13290 on success and a positive value on failure. */
13291
13292 static int
13293 extract_arm_insn (abstract_memory_reader& reader,
13294 insn_decode_record *insn_record, uint32_t insn_size)
13295 {
13296 gdb_byte buf[insn_size];
13297
13298 memset (&buf[0], 0, insn_size);
13299
13300 if (!reader.read (insn_record->this_addr, buf, insn_size))
13301 return 1;
13302 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13303 insn_size,
13304 gdbarch_byte_order_for_code (insn_record->gdbarch));
13305 return 0;
13306 }
13307
13308 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13309
13310 /* Decode an arm/thumb insn depending on its condition codes and opcodes,
13311 and dispatch it to the matching record handler. */
13312
13313 static int
13314 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13315 record_type_t record_type, uint32_t insn_size)
13316 {
13317
13318 /* Starting from numerical 0, bits 25, 26 and 27 decode the type of arm
13319 instruction. */
13320 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13321 {
13322 arm_record_data_proc_misc_ld_str, /* 000. */
13323 arm_record_data_proc_imm, /* 001. */
13324 arm_record_ld_st_imm_offset, /* 010. */
13325 arm_record_ld_st_reg_offset, /* 011. */
13326 arm_record_ld_st_multiple, /* 100. */
13327 arm_record_b_bl, /* 101. */
13328 arm_record_asimd_vfp_coproc, /* 110. */
13329 arm_record_coproc_data_proc /* 111. */
13330 };
13331
13332 /* Starting from numerical 0, bits 13, 14 and 15 decode the type of thumb
13333 instruction. */
13334 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13335 {
13336 thumb_record_shift_add_sub, /* 000. */
13337 thumb_record_add_sub_cmp_mov, /* 001. */
13338 thumb_record_ld_st_reg_offset, /* 010. */
13339 thumb_record_ld_st_imm_offset, /* 011. */
13340 thumb_record_ld_st_stack, /* 100. */
13341 thumb_record_misc, /* 101. */
13342 thumb_record_ldm_stm_swi, /* 110. */
13343 thumb_record_branch /* 111. */
13344 };
13345
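/* ARM instructions are dispatched on bits 25-27 and Thumb instructions on
   bits 13-15; 32-bit Thumb-2 instructions have their two halfwords swapped
   into place before thumb2_record_decode_insn_handler runs.  */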
13346 uint32_t ret = 0; /* Return value: -1 on failure, 0 on success. */
13347 uint32_t insn_id = 0;
13348
13349 if (extract_arm_insn (reader, arm_record, insn_size))
13350 {
13351 if (record_debug)
13352 {
13353 printf_unfiltered (_("Process record: error reading memory at "
13354 "addr %s len = %d.\n"),
13355 paddress (arm_record->gdbarch,
13356 arm_record->this_addr), insn_size);
13357 }
13358 return -1;
13359 }
13360 else if (ARM_RECORD == record_type)
13361 {
13362 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13363 insn_id = bits (arm_record->arm_insn, 25, 27);
13364
13365 if (arm_record->cond == 0xf)
13366 ret = arm_record_extension_space (arm_record);
13367 else
13368 {
13369 /* The insn has not fallen into the extension space, so decode it
13370 by dispatching on bits 25-27. */
13371 ret = arm_handle_insn[insn_id] (arm_record);
13372 }
13373 if (ret != ARM_RECORD_SUCCESS)
13374 {
13375 arm_record_unsupported_insn (arm_record);
13376 ret = -1;
13377 }
13378 }
13379 else if (THUMB_RECORD == record_type)
13380 {
13381 /* Thumb does not have condition codes, so set the condition to -1. */
13382 arm_record->cond = -1;
13383 insn_id = bits (arm_record->arm_insn, 13, 15);
13384 ret = thumb_handle_insn[insn_id] (arm_record);
13385 if (ret != ARM_RECORD_SUCCESS)
13386 {
13387 arm_record_unsupported_insn (arm_record);
13388 ret = -1;
13389 }
13390 }
13391 else if (THUMB2_RECORD == record_type)
13392 {
13393 /* Thumb does not have condition codes, so set the condition to -1. */
13394 arm_record->cond = -1;
13395
13396 /* Swap the first halfword of the 32-bit thumb instruction with the second. */
13397 arm_record->arm_insn
13398 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13399
13400 ret = thumb2_record_decode_insn_handler (arm_record);
13401
13402 if (ret != ARM_RECORD_SUCCESS)
13403 {
13404 arm_record_unsupported_insn (arm_record);
13405 ret = -1;
13406 }
13407 }
13408 else
13409 {
13410 /* Throw assertion. */
13411 gdb_assert_not_reached ("not a valid instruction, could not decode");
13412 }
13413
13414 return ret;
13415 }
13416
13417 #if GDB_SELF_TEST
13418 namespace selftests {
13419
13420 /* Provide both 16-bit and 32-bit thumb instructions. */
13421
13422 class instruction_reader_thumb : public abstract_memory_reader
13423 {
13424 public:
13425 template<size_t SIZE>
13426 instruction_reader_thumb (enum bfd_endian endian,
13427 const uint16_t (&insns)[SIZE])
13428 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13429 {}
13430
13431 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13432 {
13433 SELF_CHECK (len == 4 || len == 2);
13434 SELF_CHECK (memaddr % 2 == 0);
13435 SELF_CHECK ((memaddr / 2) < m_insns_size);
13436
13437 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13438 if (len == 4)
13439 {
13440 store_unsigned_integer (&buf[2], 2, m_endian,
13441 m_insns[memaddr / 2 + 1]);
13442 }
13443 return true;
13444 }
13445
13446 private:
13447 enum bfd_endian m_endian;
13448 const uint16_t *m_insns;
13449 size_t m_insns_size;
13450 };
13451
13452 static void
13453 arm_record_test (void)
13454 {
13455 struct gdbarch_info info;
13456 info.bfd_arch_info = bfd_scan_arch ("arm");
13457
13458 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13459
13460 SELF_CHECK (gdbarch != NULL);
13461
13462 /* 16-bit Thumb instructions. */
13463 {
13464 insn_decode_record arm_record;
13465
13466 memset (&arm_record, 0, sizeof (insn_decode_record));
13467 arm_record.gdbarch = gdbarch;
13468
13469 static const uint16_t insns[] = {
13470 /* db b2 uxtb r3, r3 */
13471 0xb2db,
13472 /* cd 58 ldr r5, [r1, r3] */
13473 0x58cd,
13474 };
13475
13476 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13477 instruction_reader_thumb reader (endian, insns);
13478 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13479 THUMB_INSN_SIZE_BYTES);
13480
13481 SELF_CHECK (ret == 0);
13482 SELF_CHECK (arm_record.mem_rec_count == 0);
13483 SELF_CHECK (arm_record.reg_rec_count == 1);
13484 SELF_CHECK (arm_record.arm_regs[0] == 3);
13485
13486 arm_record.this_addr += 2;
13487 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13488 THUMB_INSN_SIZE_BYTES);
13489
13490 SELF_CHECK (ret == 0);
13491 SELF_CHECK (arm_record.mem_rec_count == 0);
13492 SELF_CHECK (arm_record.reg_rec_count == 1);
13493 SELF_CHECK (arm_record.arm_regs[0] == 5);
13494 }
13495
13496 /* 32-bit Thumb-2 instructions. */
13497 {
13498 insn_decode_record arm_record;
13499
13500 memset (&arm_record, 0, sizeof (insn_decode_record));
13501 arm_record.gdbarch = gdbarch;
13502
13503 static const uint16_t insns[] = {
13504 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13505 0xee1d, 0x7f70,
13506 };
13507
13508 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13509 instruction_reader_thumb reader (endian, insns);
13510 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13511 THUMB2_INSN_SIZE_BYTES);
13512
13513 SELF_CHECK (ret == 0);
13514 SELF_CHECK (arm_record.mem_rec_count == 0);
13515 SELF_CHECK (arm_record.reg_rec_count == 1);
13516 SELF_CHECK (arm_record.arm_regs[0] == 7);
13517 }
13518 }
13519
13520 /* Instruction reader from manually cooked instruction sequences. */
13521
13522 class test_arm_instruction_reader : public arm_instruction_reader
13523 {
13524 public:
13525 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
13526 : m_insns (insns)
13527 {}
13528
13529 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
13530 {
13531 SELF_CHECK (memaddr % 4 == 0);
13532 SELF_CHECK (memaddr / 4 < m_insns.size ());
13533
13534 return m_insns[memaddr / 4];
13535 }
13536
13537 private:
13538 const gdb::array_view<const uint32_t> m_insns;
13539 };
13540
13541 static void
13542 arm_analyze_prologue_test ()
13543 {
13544 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
13545 {
13546 struct gdbarch_info info;
13547 info.byte_order = endianness;
13548 info.byte_order_for_code = endianness;
13549 info.bfd_arch_info = bfd_scan_arch ("arm");
13550
13551 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13552
13553 SELF_CHECK (gdbarch != NULL);
13554
13555 /* The "sub" instruction contains an immediate value rotate count of 0,
13556 which resulted in a 32-bit shift of a 32-bit value, caught by
13557 UBSan. */
13558 const uint32_t insns[] = {
13559 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13560 0xe1a05000, /* mov r5, r0 */
13561 0xe5903020, /* ldr r3, [r0, #32] */
13562 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13563 };
13564
13565 test_arm_instruction_reader mem_reader (insns);
13566 arm_prologue_cache cache;
13567 cache.saved_regs = trad_frame_alloc_saved_regs (gdbarch);
13568
13569 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
13570 }
13571 }
13572
13573 } // namespace selftests
13574 #endif /* GDB_SELF_TEST */
13575
13576 /* Cleans up local record registers and memory allocations. */
13577
13578 static void
13579 deallocate_reg_mem (insn_decode_record *record)
13580 {
13581 xfree (record->arm_regs);
13582 xfree (record->arm_mems);
13583 }
13584
13585
13586 /* Parse the current instruction and record the values of the registers and
13587 memory that the instruction will change to "record_arch_list".
13588 Return -1 if something goes wrong. */
13589
13590 int
13591 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13592 CORE_ADDR insn_addr)
13593 {
13594
13595 uint32_t no_of_rec = 0;
13596 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
13597 ULONGEST t_bit = 0, insn_id = 0;
13598
13599 ULONGEST u_regval = 0;
13600
13601 insn_decode_record arm_record;
13602
13603 memset (&arm_record, 0, sizeof (insn_decode_record));
13604 arm_record.regcache = regcache;
13605 arm_record.this_addr = insn_addr;
13606 arm_record.gdbarch = gdbarch;
13607
13608
13609 if (record_debug > 1)
13610 {
13611 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13612 "addr = %s\n",
13613 paddress (gdbarch, arm_record.this_addr));
13614 }
13615
13616 instruction_reader reader;
13617 if (extract_arm_insn (reader, &arm_record, 2))
13618 {
13619 if (record_debug)
13620 {
13621 printf_unfiltered (_("Process record: error reading memory at "
13622 "addr %s len = %d.\n"),
13623 paddress (arm_record.gdbarch,
13624 arm_record.this_addr), 2);
13625 }
13626 return -1;
13627 }
13628
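/* Only two bytes were read above: the first halfword is enough to tell a
   16-bit Thumb instruction from the leading halfword of a 32-bit Thumb-2
   instruction (bits 11-15 being 0x1d, 0x1e or 0x1f).  decode_insn re-reads
   the instruction with the size chosen below.  */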
13629 /* Check whether the insn is a thumb or an arm one. */
13630
13631 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13632 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13633
13634
13635 if (!(u_regval & t_bit))
13636 {
13637 /* We are decoding arm insn. */
13638 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13639 }
13640 else
13641 {
13642 insn_id = bits (arm_record.arm_insn, 11, 15);
13643 /* Is it a thumb2 insn? */
13644 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13645 {
13646 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13647 THUMB2_INSN_SIZE_BYTES);
13648 }
13649 else
13650 {
13651 /* We are decoding thumb insn. */
13652 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13653 THUMB_INSN_SIZE_BYTES);
13654 }
13655 }
13656
13657 if (0 == ret)
13658 {
13659 /* Record registers. */
13660 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13661 if (arm_record.arm_regs)
13662 {
13663 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13664 {
13665 if (record_full_arch_list_add_reg
13666 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13667 ret = -1;
13668 }
13669 }
13670 /* Record memories. */
13671 if (arm_record.arm_mems)
13672 {
13673 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13674 {
13675 if (record_full_arch_list_add_mem
13676 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13677 arm_record.arm_mems[no_of_rec].len))
13678 ret = -1;
13679 }
13680 }
13681
13682 if (record_full_arch_list_add_end ())
13683 ret = -1;
13684 }
13685
13686
13687 deallocate_reg_mem (&arm_record);
13688
13689 return ret;
13690 }
13691
13692 /* See arm-tdep.h. */
13693
13694 const target_desc *
13695 arm_read_description (arm_fp_type fp_type)
13696 {
13697 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13698
13699 if (tdesc == nullptr)
13700 {
13701 tdesc = arm_create_target_description (fp_type);
13702 tdesc_arm_list[fp_type] = tdesc;
13703 }
13704
13705 return tdesc;
13706 }
13707
13708 /* See arm-tdep.h. */
13709
13710 const target_desc *
13711 arm_read_mprofile_description (arm_m_profile_type m_type)
13712 {
13713 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13714
13715 if (tdesc == nullptr)
13716 {
13717 tdesc = arm_create_mprofile_target_description (m_type);
13718 tdesc_arm_mprofile_list[m_type] = tdesc;
13719 }
13720
13721 return tdesc;
13722 }