[binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2021 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "producer.h"
64
65 #if GDB_SELF_TEST
66 #include "gdbsupport/selftest.h"
67 #endif
68
69 static bool arm_debug;
70
71 /* Print an "arm" debug statement. */
72
73 #define arm_debug_printf(fmt, ...) \
74 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
75
76 /* Macros for setting and testing a bit in a minimal symbol that marks
77 it as a Thumb function. The MSB of the minimal symbol's "info" field
78 is used for this purpose.
79
80 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
81 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
82
83 #define MSYMBOL_SET_SPECIAL(msym) \
84 MSYMBOL_TARGET_FLAG_1 (msym) = 1
85
86 #define MSYMBOL_IS_SPECIAL(msym) \
87 MSYMBOL_TARGET_FLAG_1 (msym)
88
89 struct arm_mapping_symbol
90 {
91 CORE_ADDR value;
92 char type;
93
94 bool operator< (const arm_mapping_symbol &other) const
95 { return this->value < other.value; }
96 };
97
98 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
99
100 struct arm_per_bfd
101 {
102 explicit arm_per_bfd (size_t num_sections)
103 : section_maps (new arm_mapping_symbol_vec[num_sections]),
104 section_maps_sorted (new bool[num_sections] ())
105 {}
106
107 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
108
109 /* Information about mapping symbols ($a, $d, $t) in the objfile.
110
111 The format is an array of vectors of arm_mapping_symbols: there is
112 one vector for each section of the objfile (the array is indexed by
113 BFD section index).
114
115 For each section, the vector of arm_mapping_symbol is sorted by
116 symbol value (address). */
117 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
118
119 /* For each corresponding element of section_maps above, is this vector
120 sorted. */
121 std::unique_ptr<bool[]> section_maps_sorted;
122 };
123
124 /* Per-bfd data used for mapping symbols. */
125 static bfd_key<arm_per_bfd> arm_bfd_data_key;
126
127 /* The list of available "set arm ..." and "show arm ..." commands. */
128 static struct cmd_list_element *setarmcmdlist = NULL;
129 static struct cmd_list_element *showarmcmdlist = NULL;
130
131 /* The type of floating-point to use. Keep this in sync with enum
132 arm_float_model, and the help string in _initialize_arm_tdep. */
133 static const char *const fp_model_strings[] =
134 {
135 "auto",
136 "softfpa",
137 "fpa",
138 "softvfp",
139 "vfp",
140 NULL
141 };
142
143 /* A variable that can be configured by the user. */
144 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
145 static const char *current_fp_model = "auto";
146
147 /* The ABI to use. Keep this in sync with arm_abi_kind. */
148 static const char *const arm_abi_strings[] =
149 {
150 "auto",
151 "APCS",
152 "AAPCS",
153 NULL
154 };
155
156 /* A variable that can be configured by the user. */
157 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
158 static const char *arm_abi_string = "auto";
159
160 /* The execution mode to assume. */
161 static const char *const arm_mode_strings[] =
162 {
163 "auto",
164 "arm",
165 "thumb",
166 NULL
167 };
168
169 static const char *arm_fallback_mode_string = "auto";
170 static const char *arm_force_mode_string = "auto";
171
172 /* The standard register names, and all the valid aliases for them. Note
173 that `fp', `sp' and `pc' are not added in this alias list, because they
174 have been added as builtin user registers in
175 std-regs.c:_initialize_frame_reg. */
176 static const struct
177 {
178 const char *name;
179 int regnum;
180 } arm_register_aliases[] = {
181 /* Basic register numbers. */
182 { "r0", 0 },
183 { "r1", 1 },
184 { "r2", 2 },
185 { "r3", 3 },
186 { "r4", 4 },
187 { "r5", 5 },
188 { "r6", 6 },
189 { "r7", 7 },
190 { "r8", 8 },
191 { "r9", 9 },
192 { "r10", 10 },
193 { "r11", 11 },
194 { "r12", 12 },
195 { "r13", 13 },
196 { "r14", 14 },
197 { "r15", 15 },
198 /* Synonyms (argument and variable registers). */
199 { "a1", 0 },
200 { "a2", 1 },
201 { "a3", 2 },
202 { "a4", 3 },
203 { "v1", 4 },
204 { "v2", 5 },
205 { "v3", 6 },
206 { "v4", 7 },
207 { "v5", 8 },
208 { "v6", 9 },
209 { "v7", 10 },
210 { "v8", 11 },
211 /* Other platform-specific names for r9. */
212 { "sb", 9 },
213 { "tr", 9 },
214 /* Special names. */
215 { "ip", 12 },
216 { "lr", 14 },
217 /* Names used by GCC (not listed in the ARM EABI). */
218 { "sl", 10 },
219 /* A special name from the older ATPCS. */
220 { "wr", 7 },
221 };
222
223 static const char *const arm_register_names[] =
224 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
225 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
226 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
227 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
228 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
229 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
230 "fps", "cpsr" }; /* 24 25 */
231
232 /* Holds the current set of options to be passed to the disassembler. */
233 static char *arm_disassembler_options;
234
235 /* Valid register name styles. */
236 static const char **valid_disassembly_styles;
237
238 /* Disassembly style to use. Default to "std" register names. */
239 static const char *disassembly_style;
240
241 /* All possible arm target descriptors. */
242 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
243 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
244
245 /* This is used to keep the bfd arch_info in sync with the disassembly
246 style. */
247 static void set_disassembly_style_sfunc (const char *, int,
248 struct cmd_list_element *);
249 static void show_disassembly_style_sfunc (struct ui_file *, int,
250 struct cmd_list_element *,
251 const char *);
252
253 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
254 readable_regcache *regcache,
255 int regnum, gdb_byte *buf);
256 static void arm_neon_quad_write (struct gdbarch *gdbarch,
257 struct regcache *regcache,
258 int regnum, const gdb_byte *buf);
259
260 static CORE_ADDR
261 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
262
263
264 /* get_next_pcs operations. */
265 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
266 arm_get_next_pcs_read_memory_unsigned_integer,
267 arm_get_next_pcs_syscall_next_pc,
268 arm_get_next_pcs_addr_bits_remove,
269 arm_get_next_pcs_is_thumb,
270 NULL,
271 };
272
273 struct arm_prologue_cache
274 {
275 /* The stack pointer at the time this frame was created; i.e. the
276 caller's stack pointer when this function was called. It is used
277 to identify this frame. */
278 CORE_ADDR prev_sp;
279
280 /* The frame base for this frame is just prev_sp - frame size.
281 FRAMESIZE is the distance from the frame pointer to the
282 initial stack pointer. */
283
284 int framesize;
285
286 /* The register used to hold the frame pointer for this frame. */
287 int framereg;
288
289 /* Saved register offsets. */
290 trad_frame_saved_reg *saved_regs;
291 };
292
293 namespace {
294
295 /* Abstract class to read ARM instructions from memory. */
296
297 class arm_instruction_reader
298 {
299 public:
300 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
301 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
302 };
303
304 /* Read instructions from target memory. */
305
306 class target_arm_instruction_reader : public arm_instruction_reader
307 {
308 public:
309 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
310 {
311 return read_code_unsigned_integer (memaddr, 4, byte_order);
312 }
313 };
314
315 } /* namespace */
316
317 static CORE_ADDR arm_analyze_prologue
318 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
319 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
320
321 /* Architecture version for displaced stepping. This affects the behaviour of
322 certain instructions, and really should not be hard-wired. */
323
324 #define DISPLACED_STEPPING_ARCH_VERSION 5
325
326 /* See arm-tdep.h. */
327
328 bool arm_apcs_32 = true;
329
330 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
331
332 int
333 arm_psr_thumb_bit (struct gdbarch *gdbarch)
334 {
335 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
336
337 if (tdep->is_m)
338 return XPSR_T;
339 else
340 return CPSR_T;
341 }
342
343 /* Determine if the processor is currently executing in Thumb mode. */
344
345 int
346 arm_is_thumb (struct regcache *regcache)
347 {
348 ULONGEST cpsr;
349 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
350
351 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
352
353 return (cpsr & t_bit) != 0;
354 }
355
356 /* Determine if FRAME is executing in Thumb mode. */
357
358 int
359 arm_frame_is_thumb (struct frame_info *frame)
360 {
361 CORE_ADDR cpsr;
362 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
363
364 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
365 directly (from a signal frame or dummy frame) or by interpreting
366 the saved LR (from a prologue or DWARF frame). So consult it and
367 trust the unwinders. */
368 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
369
370 return (cpsr & t_bit) != 0;
371 }
372
373 /* Search for the mapping symbol covering MEMADDR. If one is found,
374 return its type. Otherwise, return 0. If START is non-NULL,
375 set *START to the location of the mapping symbol. */
376
377 static char
378 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
379 {
380 struct obj_section *sec;
381
382 /* If there are mapping symbols, consult them. */
383 sec = find_pc_section (memaddr);
384 if (sec != NULL)
385 {
386 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
387 if (data != NULL)
388 {
389 unsigned int section_idx = sec->the_bfd_section->index;
390 arm_mapping_symbol_vec &map
391 = data->section_maps[section_idx];
392
393 /* Sort the vector on first use. */
394 if (!data->section_maps_sorted[section_idx])
395 {
396 std::sort (map.begin (), map.end ());
397 data->section_maps_sorted[section_idx] = true;
398 }
399
400 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
401 arm_mapping_symbol_vec::const_iterator it
402 = std::lower_bound (map.begin (), map.end (), map_key);
403
404 /* std::lower_bound finds the earliest ordered insertion
405 point. If the symbol at this position starts at this exact
406 address, we use that; otherwise, the preceding
407 mapping symbol covers this address. */
408 if (it < map.end ())
409 {
410 if (it->value == map_key.value)
411 {
412 if (start)
413 *start = it->value + sec->addr ();
414 return it->type;
415 }
416 }
417
418 if (it > map.begin ())
419 {
420 arm_mapping_symbol_vec::const_iterator prev_it
421 = it - 1;
422
423 if (start)
424 *start = prev_it->value + sec->addr ();
425 return prev_it->type;
426 }
427 }
428 }
429
430 return 0;
431 }
432
433 /* Determine if the program counter specified in MEMADDR is in a Thumb
434 function. This function should be called for addresses unrelated to
435 any executing frame; otherwise, prefer arm_frame_is_thumb. */
436
437 int
438 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
439 {
440 struct bound_minimal_symbol sym;
441 char type;
442 arm_displaced_step_copy_insn_closure *dsc = nullptr;
443 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
444
445 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
446 dsc = ((arm_displaced_step_copy_insn_closure * )
447 gdbarch_displaced_step_copy_insn_closure_by_addr
448 (gdbarch, current_inferior (), memaddr));
449
450 /* If we are checking the mode of a displaced instruction in the copy area,
451 the mode should be determined by the instruction at the original address. */
452 if (dsc)
453 {
454 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
455 (unsigned long) dsc->insn_addr,
456 (unsigned long) memaddr);
457 memaddr = dsc->insn_addr;
458 }
459
460 /* If bit 0 of the address is set, assume this is a Thumb address. */
461 if (IS_THUMB_ADDR (memaddr))
462 return 1;
463
464 /* If the user wants to override the symbol table, let him. */
465 if (strcmp (arm_force_mode_string, "arm") == 0)
466 return 0;
467 if (strcmp (arm_force_mode_string, "thumb") == 0)
468 return 1;
469
470 /* ARM v6-M and v7-M are always in Thumb mode. */
471 if (tdep->is_m)
472 return 1;
473
474 /* If there are mapping symbols, consult them. */
475 type = arm_find_mapping_symbol (memaddr, NULL);
476 if (type)
477 return type == 't';
478
479 /* Thumb functions have a "special" bit set in minimal symbols. */
480 sym = lookup_minimal_symbol_by_pc (memaddr);
481 if (sym.minsym)
482 return (MSYMBOL_IS_SPECIAL (sym.minsym));
483
484 /* If the user wants to override the fallback mode, let them. */
485 if (strcmp (arm_fallback_mode_string, "arm") == 0)
486 return 0;
487 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
488 return 1;
489
490 /* If we couldn't find any symbol, but we're talking to a running
491 target, then trust the current value of $cpsr. This lets
492 "display/i $pc" always show the correct mode (though if there is
493 a symbol table we will not reach here, so it still may not be
494 displayed in the mode in which it will be executed). */
495 if (target_has_registers ())
496 return arm_frame_is_thumb (get_current_frame ());
497
498 /* Otherwise we're out of luck; we assume ARM. */
499 return 0;
500 }
501
502 /* Determine if the address specified equals any of these magic return
503 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
504 architectures.
505
506 From ARMv6-M Reference Manual B1.5.8
507 Table B1-5 Exception return behavior
508
509 EXC_RETURN Return To Return Stack
510 0xFFFFFFF1 Handler mode Main
511 0xFFFFFFF9 Thread mode Main
512 0xFFFFFFFD Thread mode Process
513
514 From ARMv7-M Reference Manual B1.5.8
515 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
516
517 EXC_RETURN Return To Return Stack
518 0xFFFFFFF1 Handler mode Main
519 0xFFFFFFF9 Thread mode Main
520 0xFFFFFFFD Thread mode Process
521
522 Table B1-9 EXC_RETURN definition of exception return behavior, with
523 FP
524
525 EXC_RETURN Return To Return Stack Frame Type
526 0xFFFFFFE1 Handler mode Main Extended
527 0xFFFFFFE9 Thread mode Main Extended
528 0xFFFFFFED Thread mode Process Extended
529 0xFFFFFFF1 Handler mode Main Basic
530 0xFFFFFFF9 Thread mode Main Basic
531 0xFFFFFFFD Thread mode Process Basic
532
533 For more details see "B1.5.8 Exception return behavior"
534 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
535
536 The ARMv8-M Architecture Technical Reference additionally defines the
537 following values for implementations without the Security Extension:
538
539 EXC_RETURN Condition
540 0xFFFFFFB0 Return to Handler mode.
541 0xFFFFFFB8 Return to Thread mode using the main stack.
542 0xFFFFFFBC Return to Thread mode using the process stack. */
543
544 static int
545 arm_m_addr_is_magic (CORE_ADDR addr)
546 {
547 switch (addr)
548 {
549 /* Values from ARMv8-M Architecture Technical Reference. */
550 case 0xffffffb0:
551 case 0xffffffb8:
552 case 0xffffffbc:
553 /* Values from the tables in B1.5.8, the EXC_RETURN definitions of
554 exception return behavior. */
555 case 0xffffffe1:
556 case 0xffffffe9:
557 case 0xffffffed:
558 case 0xfffffff1:
559 case 0xfffffff9:
560 case 0xfffffffd:
561 /* Address is magic. */
562 return 1;
563
564 default:
565 /* Address is not magic. */
566 return 0;
567 }
568 }
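
/* A hedged, illustrative sketch (not part of the original file): how the
   EXC_RETURN values from the tables above are classified by
   arm_m_addr_is_magic.  The function name below is hypothetical and the
   check is not registered as a real GDB selftest.  */
#if GDB_SELF_TEST
static void ATTRIBUTE_UNUSED
arm_m_addr_is_magic_example ()
{
  /* Thread mode, main stack, basic frame.  */
  SELF_CHECK (arm_m_addr_is_magic (0xfffffff9));
  /* Handler mode, main stack, extended (FP) frame.  */
  SELF_CHECK (arm_m_addr_is_magic (0xffffffe1));
  /* An ordinary code address is not magic.  */
  SELF_CHECK (!arm_m_addr_is_magic (0x00008000));
}
#endif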
569
570 /* Remove useless bits from addresses in a running program. */
571 static CORE_ADDR
572 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
573 {
574 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
575
576 /* On M-profile devices, do not strip the low bit from EXC_RETURN
577 (the magic exception return address). */
578 if (tdep->is_m && arm_m_addr_is_magic (val))
579 return val;
580
581 if (arm_apcs_32)
582 return UNMAKE_THUMB_ADDR (val);
583 else
584 return (val & 0x03fffffc);
585 }
586
587 /* Return 1 if PC is the start of a compiler helper function which
588 can be safely ignored during prologue skipping. IS_THUMB is true
589 if the function is known to be a Thumb function due to the way it
590 is being called. */
591 static int
592 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
593 {
594 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
595 struct bound_minimal_symbol msym;
596
597 msym = lookup_minimal_symbol_by_pc (pc);
598 if (msym.minsym != NULL
599 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
600 && msym.minsym->linkage_name () != NULL)
601 {
602 const char *name = msym.minsym->linkage_name ();
603
604 /* The GNU linker's Thumb call stub to foo is named
605 __foo_from_thumb. */
606 if (strstr (name, "_from_thumb") != NULL)
607 name += 2;
608
609 /* On soft-float targets, __truncdfsf2 is called to convert promoted
610 arguments to their argument types in non-prototyped
611 functions. */
612 if (startswith (name, "__truncdfsf2"))
613 return 1;
614 if (startswith (name, "__aeabi_d2f"))
615 return 1;
616
617 /* Internal functions related to thread-local storage. */
618 if (startswith (name, "__tls_get_addr"))
619 return 1;
620 if (startswith (name, "__aeabi_read_tp"))
621 return 1;
622 }
623 else
624 {
625 /* If we run against a stripped glibc, we may be unable to identify
626 special functions by name. Check for one important case,
627 __aeabi_read_tp, by comparing the *code* against the default
628 implementation (this is hand-written ARM assembler in glibc). */
629
630 if (!is_thumb
631 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
632 == 0xe3e00a0f /* mov r0, #0xffff0fff */
633 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
634 == 0xe240f01f) /* sub pc, r0, #31 */
635 return 1;
636 }
637
638 return 0;
639 }
640
641 /* Extract the immediate from a movw/movt instruction of encoding T.
642 INSN1 is the first 16 bits of the instruction, and INSN2 is the
643 second 16 bits. */
644 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
645 ((bits ((insn1), 0, 3) << 12) \
646 | (bits ((insn1), 10, 10) << 11) \
647 | (bits ((insn2), 12, 14) << 8) \
648 | bits ((insn2), 0, 7))
649
650 /* Extract the immediate from a movw/movt instruction of encoding A.
651 INSN is the 32-bit instruction. */
652 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
653 ((bits ((insn), 16, 19) << 12) \
654 | bits ((insn), 0, 11))
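
/* A hedged, illustrative sketch (not part of the original file): a worked
   example of the two extraction macros above, using hand-assembled
   encodings of "movw r2, #0x1345".  0xe3012345 is the A1 (ARM) encoding;
   the halfword pair 0xf241/0x3245 is the equivalent Thumb-2 encoding.  The
   function name is hypothetical and this is not registered as a real GDB
   selftest.  */
#if GDB_SELF_TEST
static void ATTRIBUTE_UNUSED
arm_movw_imm_extract_example ()
{
  SELF_CHECK (EXTRACT_MOVW_MOVT_IMM_A (0xe3012345) == 0x1345);
  SELF_CHECK (EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x3245) == 0x1345);
}
#endif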
655
656 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
657
658 static unsigned int
659 thumb_expand_immediate (unsigned int imm)
660 {
661 unsigned int count = imm >> 7;
662
663 if (count < 8)
664 switch (count / 2)
665 {
666 case 0:
667 return imm & 0xff;
668 case 1:
669 return (imm & 0xff) | ((imm & 0xff) << 16);
670 case 2:
671 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
672 case 3:
673 return (imm & 0xff) | ((imm & 0xff) << 8)
674 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
675 }
676
677 return (0x80 | (imm & 0x7f)) << (32 - count);
678 }
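
/* A hedged, illustrative sketch (not part of the original file): a few
   worked values for thumb_expand_immediate, following the
   ThumbExpandImmediate rules implemented above (0xXY, 0x00XY00XY,
   0xXY00XY00, 0xXYXYXYXY, or a rotated 8-bit value).  The function name is
   hypothetical and this is not registered as a real GDB selftest.  */
#if GDB_SELF_TEST
static void ATTRIBUTE_UNUSED
thumb_expand_immediate_example ()
{
  SELF_CHECK (thumb_expand_immediate (0x0ab) == 0x000000ab);
  SELF_CHECK (thumb_expand_immediate (0x1ab) == 0x00ab00ab);
  SELF_CHECK (thumb_expand_immediate (0x2ab) == 0xab00ab00);
  SELF_CHECK (thumb_expand_immediate (0x3ab) == 0xabababab);
  SELF_CHECK (thumb_expand_immediate (0x4ff) == 0x7f800000);
}
#endif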
679
680 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
681 the epilogue, 0 otherwise. */
682
683 static int
684 thumb_instruction_restores_sp (unsigned short insn)
685 {
686 return (insn == 0x46bd /* mov sp, r7 */
687 || (insn & 0xff80) == 0xb000 /* add sp, imm */
688 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
689 }
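
/* A hedged, illustrative sketch (not part of the original file): typical
   16-bit encodings matched (and one not matched) by
   thumb_instruction_restores_sp.  The function name is hypothetical and
   this is not registered as a real GDB selftest.  */
#if GDB_SELF_TEST
static void ATTRIBUTE_UNUSED
thumb_instruction_restores_sp_example ()
{
  SELF_CHECK (thumb_instruction_restores_sp (0x46bd));	 /* mov sp, r7 */
  SELF_CHECK (thumb_instruction_restores_sp (0xb008));	 /* add sp, #32 */
  SELF_CHECK (thumb_instruction_restores_sp (0xbd90));	 /* pop {r4, r7, pc} */
  SELF_CHECK (!thumb_instruction_restores_sp (0xb580)); /* push {r7, lr} */
}
#endif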
690
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint. */
696
697 static CORE_ADDR
698 thumb_analyze_prologue (struct gdbarch *gdbarch,
699 CORE_ADDR start, CORE_ADDR limit,
700 struct arm_prologue_cache *cache)
701 {
702 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
703 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
704 int i;
705 pv_t regs[16];
706 CORE_ADDR offset;
707 CORE_ADDR unrecognized_pc = 0;
708
709 for (i = 0; i < 16; i++)
710 regs[i] = pv_register (i, 0);
711 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
712
713 while (start < limit)
714 {
715 unsigned short insn;
716
717 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
718
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
720 {
721 int regno;
722 int mask;
723
724 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
725 break;
726
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
730
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
734 {
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
736 -4);
737 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
738 }
739 }
740 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
741 {
742 offset = (insn & 0x7f) << 2; /* get scaled offset */
743 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
744 -offset);
745 }
746 else if (thumb_instruction_restores_sp (insn))
747 {
748 /* Don't scan past the epilogue. */
749 break;
750 }
751 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
753 (insn & 0xff) << 2);
754 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
756 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
757 bits (insn, 6, 8));
758 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
760 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
761 bits (insn, 0, 7));
762 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
764 && pv_is_constant (regs[bits (insn, 3, 5)]))
765 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
766 regs[bits (insn, 6, 8)]);
767 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs[bits (insn, 3, 6)]))
769 {
770 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
771 int rm = bits (insn, 3, 6);
772 regs[rd] = pv_add (regs[rd], regs[rm]);
773 }
774 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
775 {
776 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
777 int src_reg = (insn & 0x78) >> 3;
778 regs[dst_reg] = regs[src_reg];
779 }
780 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
781 {
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno = (insn >> 8) & 0x7;
786 pv_t addr;
787
788 offset = (insn & 0xff) << 2;
789 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
790
791 if (stack.store_would_trash (addr))
792 break;
793
794 stack.store (addr, 4, regs[regno]);
795 }
796 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
797 {
798 int rd = bits (insn, 0, 2);
799 int rn = bits (insn, 3, 5);
800 pv_t addr;
801
802 offset = bits (insn, 6, 10) << 2;
803 addr = pv_add_constant (regs[rn], offset);
804
805 if (stack.store_would_trash (addr))
806 break;
807
808 stack.store (addr, 4, regs[rd]);
809 }
810 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
813 /* Ignore stores of argument registers to the stack. */
814 ;
815 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
819 ;
820 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
823 /* Similarly ignore single loads from the stack. */
824 ;
825 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
829 ;
830 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
832 on Thumb. */
833 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
834 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
835 {
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant;
838 CORE_ADDR loc;
839
840 loc = start + 4 + bits (insn, 0, 7) * 4;
841 constant = read_memory_unsigned_integer (loc, 4, byte_order);
842 regs[bits (insn, 8, 10)] = pv_constant (constant);
843 }
844 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
845 {
846 unsigned short inst2;
847
848 inst2 = read_code_unsigned_integer (start + 2, 2,
849 byte_order_for_code);
850
851 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
852 {
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
856 CORE_ADDR nextpc;
857 int j1, j2, imm1, imm2;
858
859 imm1 = sbits (insn, 0, 10);
860 imm2 = bits (inst2, 0, 10);
861 j1 = bit (inst2, 13);
862 j2 = bit (inst2, 11);
863
864 offset = ((imm1 << 12) + (imm2 << 1));
865 offset ^= ((!j2) << 22) | ((!j1) << 23);
866
867 nextpc = start + 4 + offset;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2, 12) == 0)
870 nextpc = nextpc & 0xfffffffc;
871
872 if (!skip_prologue_function (gdbarch, nextpc,
873 bit (inst2, 12) != 0))
874 break;
875 }
876
877 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
878 { registers } */
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
880 {
881 pv_t addr = regs[bits (insn, 0, 3)];
882 int regno;
883
884 if (stack.store_would_trash (addr))
885 break;
886
887 /* Calculate offsets of saved registers. */
888 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
889 if (inst2 & (1 << regno))
890 {
891 addr = pv_add_constant (addr, -4);
892 stack.store (addr, 4, regs[regno]);
893 }
894
895 if (insn & 0x0020)
896 regs[bits (insn, 0, 3)] = addr;
897 }
898
899 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
900 [Rn, #+/-imm]{!} */
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
902 {
903 int regno1 = bits (inst2, 12, 15);
904 int regno2 = bits (inst2, 8, 11);
905 pv_t addr = regs[bits (insn, 0, 3)];
906
907 offset = inst2 & 0xff;
908 if (insn & 0x0080)
909 addr = pv_add_constant (addr, offset);
910 else
911 addr = pv_add_constant (addr, -offset);
912
913 if (stack.store_would_trash (addr))
914 break;
915
916 stack.store (addr, 4, regs[regno1]);
917 stack.store (pv_add_constant (addr, 4),
918 4, regs[regno2]);
919
920 if (insn & 0x0020)
921 regs[bits (insn, 0, 3)] = addr;
922 }
923
924 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2 & 0x0c00) == 0x0c00
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
927 {
928 int regno = bits (inst2, 12, 15);
929 pv_t addr = regs[bits (insn, 0, 3)];
930
931 offset = inst2 & 0xff;
932 if (inst2 & 0x0200)
933 addr = pv_add_constant (addr, offset);
934 else
935 addr = pv_add_constant (addr, -offset);
936
937 if (stack.store_would_trash (addr))
938 break;
939
940 stack.store (addr, 4, regs[regno]);
941
942 if (inst2 & 0x0100)
943 regs[bits (insn, 0, 3)] = addr;
944 }
945
946 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 {
949 int regno = bits (inst2, 12, 15);
950 pv_t addr;
951
952 offset = inst2 & 0xfff;
953 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
954
955 if (stack.store_would_trash (addr))
956 break;
957
958 stack.store (addr, 4, regs[regno]);
959 }
960
961 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
963 /* Ignore stores of argument registers to the stack. */
964 ;
965
966 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2 & 0x0d00) == 0x0c00
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
970 ;
971
972 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
973 { registers } */
974 && (inst2 & 0x8000) == 0x0000
975 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
978 ;
979
980 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
981 [Rn, #+/-imm] */
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore dual loads from the stack. */
984 ;
985
986 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2 & 0x0d00) == 0x0c00
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
990 ;
991
992 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore single loads from the stack. */
995 ;
996
997 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2 & 0x8000) == 0x0000)
999 {
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1003
1004 regs[bits (inst2, 8, 11)]
1005 = pv_add_constant (regs[bits (insn, 0, 3)],
1006 thumb_expand_immediate (imm));
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1011 {
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1015
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1018 }
1019
1020 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1022 {
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1026
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)],
1029 - (CORE_ADDR) thumb_expand_immediate (imm));
1030 }
1031
1032 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2 & 0x8000) == 0x0000)
1034 {
1035 unsigned int imm = ((bits (insn, 10, 10) << 11)
1036 | (bits (inst2, 12, 14) << 8)
1037 | bits (inst2, 0, 7));
1038
1039 regs[bits (inst2, 8, 11)]
1040 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1041 }
1042
1043 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1044 {
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1048
1049 regs[bits (inst2, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm));
1051 }
1052
1053 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1054 {
1055 unsigned int imm
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1057
1058 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1059 }
1060
1061 else if (insn == 0xea5f /* mov.w Rd,Rm */
1062 && (inst2 & 0xf0f0) == 0)
1063 {
1064 int dst_reg = (inst2 & 0x0f00) >> 8;
1065 int src_reg = inst2 & 0xf;
1066 regs[dst_reg] = regs[src_reg];
1067 }
1068
1069 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1070 {
1071 /* Constant pool loads. */
1072 unsigned int constant;
1073 CORE_ADDR loc;
1074
1075 offset = bits (inst2, 0, 11);
1076 if (insn & 0x0080)
1077 loc = start + 4 + offset;
1078 else
1079 loc = start + 4 - offset;
1080
1081 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1082 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1083 }
1084
1085 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1086 {
1087 /* Constant pool loads. */
1088 unsigned int constant;
1089 CORE_ADDR loc;
1090
1091 offset = bits (inst2, 0, 7) << 2;
1092 if (insn & 0x0080)
1093 loc = start + 4 + offset;
1094 else
1095 loc = start + 4 - offset;
1096
1097 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1098 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1099
1100 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1101 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1102 }
1103
1104 else if (thumb2_instruction_changes_pc (insn, inst2))
1105 {
1106 /* Don't scan past anything that might change control flow. */
1107 break;
1108 }
1109 else
1110 {
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc = start;
1114 }
1115
1116 start += 2;
1117 }
1118 else if (thumb_instruction_changes_pc (insn))
1119 {
1120 /* Don't scan past anything that might change control flow. */
1121 break;
1122 }
1123 else
1124 {
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc = start;
1128 }
1129
1130 start += 2;
1131 }
1132
1133 arm_debug_printf ("Prologue scan stopped at %s",
1134 paddress (gdbarch, start));
1135
1136 if (unrecognized_pc == 0)
1137 unrecognized_pc = start;
1138
1139 if (cache == NULL)
1140 return unrecognized_pc;
1141
1142 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1143 {
1144 /* Frame pointer is fp. Frame size is constant. */
1145 cache->framereg = ARM_FP_REGNUM;
1146 cache->framesize = -regs[ARM_FP_REGNUM].k;
1147 }
1148 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1149 {
1150 /* Frame pointer is r7. Frame size is constant. */
1151 cache->framereg = THUMB_FP_REGNUM;
1152 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1153 }
1154 else
1155 {
1156 /* Try the stack pointer... this is a bit desperate. */
1157 cache->framereg = ARM_SP_REGNUM;
1158 cache->framesize = -regs[ARM_SP_REGNUM].k;
1159 }
1160
1161 for (i = 0; i < 16; i++)
1162 if (stack.find_reg (gdbarch, i, &offset))
1163 cache->saved_regs[i].set_addr (offset);
1164
1165 return unrecognized_pc;
1166 }
1167
1168
1169 /* Try to analyze the instructions starting at PC, which load the
1170 symbol __stack_chk_guard. Return the address of the instruction
1171 after this load, set the destination register number in *DESTREG,
1172 and set the size in bytes of the loading sequence in *OFFSET.
1173 Return 0 if the instructions are not recognized. */
1174
1175 static CORE_ADDR
1176 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1177 unsigned int *destreg, int *offset)
1178 {
1179 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1180 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1181 unsigned int low, high, address;
1182
1183 address = 0;
1184 if (is_thumb)
1185 {
1186 unsigned short insn1
1187 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1188
1189 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1190 {
1191 *destreg = bits (insn1, 8, 10);
1192 *offset = 2;
1193 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1194 address = read_memory_unsigned_integer (address, 4,
1195 byte_order_for_code);
1196 }
1197 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1198 {
1199 unsigned short insn2
1200 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1201
1202 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1203
1204 insn1
1205 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1206 insn2
1207 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1208
1209 /* movt Rd, #const */
1210 if ((insn1 & 0xfbc0) == 0xf2c0)
1211 {
1212 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1213 *destreg = bits (insn2, 8, 11);
1214 *offset = 8;
1215 address = (high << 16 | low);
1216 }
1217 }
1218 }
1219 else
1220 {
1221 unsigned int insn
1222 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1223
1224 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1225 {
1226 address = bits (insn, 0, 11) + pc + 8;
1227 address = read_memory_unsigned_integer (address, 4,
1228 byte_order_for_code);
1229
1230 *destreg = bits (insn, 12, 15);
1231 *offset = 4;
1232 }
1233 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1234 {
1235 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1236
1237 insn
1238 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1239
1240 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1241 {
1242 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1243 *destreg = bits (insn, 12, 15);
1244 *offset = 8;
1245 address = (high << 16 | low);
1246 }
1247 }
1248 }
1249
1250 return address;
1251 }
1252
1253 /* Try to skip the sequence of instructions used for the stack protector.
1254 If PC points to the first instruction of this sequence, return the
1255 address of the first instruction after the sequence; otherwise, return PC.
1256
1257 On ARM, this sequence is composed of three main steps:
1258 Step 1: load the address of the symbol __stack_chk_guard,
1259 Step 2: load the guard value from that address,
1260 Step 3: store it somewhere else.
1261
1262 Usually, the instructions in steps 2 and 3 are the same across ARM
1263 architectures: step 2 is a single 'ldr Rx, [Rn, #0]', and step 3 is a
1264 single 'str Rx, [r7, #immd]'. However, the instructions in step 1 vary
1265 between ARM architectures. On ARMv7 they are:
1266
1267 movw Rn, #:lower16:__stack_chk_guard
1268 movt Rn, #:upper16:__stack_chk_guard
1269
1270 On ARMv5t it is:
1271
1272 ldr Rn, .Label
1273 ....
1274 .Label:
1275 .word __stack_chk_guard
1276
1277 Since ldr/str are very common instructions, we can't use them alone as
1278 the 'fingerprint' or 'signature' of the stack protector sequence. Instead
1279 we use the sequence {movw/movt, ldr}/ldr/str together with the symbol
1280 __stack_chk_guard (if not stripped) as the 'fingerprint' of a stack
1281 protector code sequence. */
1282
1283 static CORE_ADDR
1284 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1285 {
1286 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1287 unsigned int basereg;
1288 struct bound_minimal_symbol stack_chk_guard;
1289 int offset;
1290 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1291 CORE_ADDR addr;
1292
1293 /* Try to parse the instructions in Step 1. */
1294 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1295 &basereg, &offset);
1296 if (!addr)
1297 return pc;
1298
1299 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1300 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1301 Otherwise, this sequence cannot be for the stack protector. */
1302 if (stack_chk_guard.minsym == NULL
1303 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1304 return pc;
1305
1306 if (is_thumb)
1307 {
1308 unsigned int destreg;
1309 unsigned short insn
1310 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1311
1312 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1313 if ((insn & 0xf800) != 0x6800)
1314 return pc;
1315 if (bits (insn, 3, 5) != basereg)
1316 return pc;
1317 destreg = bits (insn, 0, 2);
1318
1319 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1320 byte_order_for_code);
1321 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1322 if ((insn & 0xf800) != 0x6000)
1323 return pc;
1324 if (destreg != bits (insn, 0, 2))
1325 return pc;
1326 }
1327 else
1328 {
1329 unsigned int destreg;
1330 unsigned int insn
1331 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1332
1333 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1334 if ((insn & 0x0e500000) != 0x04100000)
1335 return pc;
1336 if (bits (insn, 16, 19) != basereg)
1337 return pc;
1338 destreg = bits (insn, 12, 15);
1339 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1340 insn = read_code_unsigned_integer (pc + offset + 4,
1341 4, byte_order_for_code);
1342 if ((insn & 0x0e500000) != 0x04000000)
1343 return pc;
1344 if (bits (insn, 12, 15) != destreg)
1345 return pc;
1346 }
1347 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1348 and 8 bytes on ARM. */
1349 if (is_thumb)
1350 return pc + offset + 4;
1351 else
1352 return pc + offset + 8;
1353 }
1354
1355 /* Advance the PC across any function entry prologue instructions to
1356 reach some "real" code.
1357
1358 The APCS (ARM Procedure Call Standard) defines the following
1359 prologue:
1360
1361 mov ip, sp
1362 [stmfd sp!, {a1,a2,a3,a4}]
1363 stmfd sp!, {...,fp,ip,lr,pc}
1364 [stfe f7, [sp, #-12]!]
1365 [stfe f6, [sp, #-12]!]
1366 [stfe f5, [sp, #-12]!]
1367 [stfe f4, [sp, #-12]!]
1368 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1369
1370 static CORE_ADDR
1371 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1372 {
1373 CORE_ADDR func_addr, limit_pc;
1374
1375 /* See if we can determine the end of the prologue via the symbol table.
1376 If so, then return either PC, or the PC after the prologue, whichever
1377 is greater. */
1378 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1379 {
1380 CORE_ADDR post_prologue_pc
1381 = skip_prologue_using_sal (gdbarch, func_addr);
1382 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1383
1384 if (post_prologue_pc)
1385 post_prologue_pc
1386 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1387
1388
1389 /* GCC always emits a line note before the prologue and another
1390 one after, even if the two are at the same address or on the
1391 same line. Take advantage of this so that we do not need to
1392 know every instruction that might appear in the prologue. We
1393 will have producer information for most binaries; if it is
1394 missing (e.g. for -gstabs), assume the GNU tools. */
1395 if (post_prologue_pc
1396 && (cust == NULL
1397 || COMPUNIT_PRODUCER (cust) == NULL
1398 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1399 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1400 return post_prologue_pc;
1401
1402 if (post_prologue_pc != 0)
1403 {
1404 CORE_ADDR analyzed_limit;
1405
1406 /* For non-GCC compilers, make sure the entire line is an
1407 acceptable prologue; GDB will round this function's
1408 return value up to the end of the following line so we
1409 can not skip just part of a line (and we do not want to).
1410
1411 RealView does not treat the prologue specially, but does
1412 associate prologue code with the opening brace; so this
1413 lets us skip the first line if we think it is the opening
1414 brace. */
1415 if (arm_pc_is_thumb (gdbarch, func_addr))
1416 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1417 post_prologue_pc, NULL);
1418 else
1419 analyzed_limit
1420 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1421 NULL, target_arm_instruction_reader ());
1422
1423 if (analyzed_limit != post_prologue_pc)
1424 return func_addr;
1425
1426 return post_prologue_pc;
1427 }
1428 }
1429
1430 /* Can't determine prologue from the symbol table, need to examine
1431 instructions. */
1432
1433 /* Find an upper limit on the function prologue using the debug
1434 information. If the debug information could not be used to provide
1435 that bound, then use an arbitrarily large number as the upper bound. */
1436 /* Like arm_scan_prologue, stop no later than pc + 64. */
1437 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1438 if (limit_pc == 0)
1439 limit_pc = pc + 64; /* Magic. */
1440
1441
1442 /* Check if this is Thumb code. */
1443 if (arm_pc_is_thumb (gdbarch, pc))
1444 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1445 else
1446 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1447 target_arm_instruction_reader ());
1448 }
1449
1450 /* *INDENT-OFF* */
1451 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1452 This function decodes a Thumb function prologue to determine:
1453 1) the size of the stack frame
1454 2) which registers are saved on it
1455 3) the offsets of saved regs
1456 4) the offset from the stack pointer to the frame pointer
1457
1458 A typical Thumb function prologue would create this stack frame
1459 (offsets relative to FP)
1460 old SP -> 24 stack parameters
1461 20 LR
1462 16 R7
1463 R7 -> 0 local variables (16 bytes)
1464 SP -> -12 additional stack space (12 bytes)
1465 The frame size would thus be 36 bytes, and the frame offset would be
1466 12 bytes. The frame register is R7.
1467
1468 The comments for thumb_analyze_prologue() describe the algorithm we
1469 use to detect the end of the prologue. */
1470 /* *INDENT-ON* */
1471
1472 static void
1473 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1474 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1475 {
1476 CORE_ADDR prologue_start;
1477 CORE_ADDR prologue_end;
1478
1479 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1480 &prologue_end))
1481 {
1482 /* See comment in arm_scan_prologue for an explanation of
1483 this heuristic. */
1484 if (prologue_end > prologue_start + 64)
1485 {
1486 prologue_end = prologue_start + 64;
1487 }
1488 }
1489 else
1490 /* We're in the boondocks: we have no idea where the start of the
1491 function is. */
1492 return;
1493
1494 prologue_end = std::min (prologue_end, prev_pc);
1495
1496 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1497 }
1498
1499 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1500 otherwise. */
1501
1502 static int
1503 arm_instruction_restores_sp (unsigned int insn)
1504 {
1505 if (bits (insn, 28, 31) != INST_NV)
1506 {
1507 if ((insn & 0x0df0f000) == 0x0080d000
1508 /* ADD SP (register or immediate). */
1509 || (insn & 0x0df0f000) == 0x0040d000
1510 /* SUB SP (register or immediate). */
1511 || (insn & 0x0ffffff0) == 0x01a0d000
1512 /* MOV SP. */
1513 || (insn & 0x0fff0000) == 0x08bd0000
1514 /* POP (LDMIA). */
1515 || (insn & 0x0fff0000) == 0x049d0000)
1516 /* POP of a single register. */
1517 return 1;
1518 }
1519
1520 return 0;
1521 }
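
/* A hedged, illustrative sketch (not part of the original file): typical
   ARM-mode encodings matched (and one not matched) by
   arm_instruction_restores_sp.  The function name is hypothetical and this
   is not registered as a real GDB selftest.  */
#if GDB_SELF_TEST
static void ATTRIBUTE_UNUSED
arm_instruction_restores_sp_example ()
{
  SELF_CHECK (arm_instruction_restores_sp (0xe28dd018));  /* add sp, sp, #24 */
  SELF_CHECK (arm_instruction_restores_sp (0xe1a0d00b));  /* mov sp, fp */
  SELF_CHECK (arm_instruction_restores_sp (0xe8bd8810));  /* pop {r4, fp, pc} */
  SELF_CHECK (!arm_instruction_restores_sp (0xe92d4800)); /* push {fp, lr} */
}
#endif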
1522
1523 /* Implement immediate value decoding, as described in section A5.2.4
1524 (Modified immediate constants in ARM instructions) of the ARM Architecture
1525 Reference Manual (ARMv7-A and ARMv7-R edition). */
1526
1527 static uint32_t
1528 arm_expand_immediate (uint32_t imm)
1529 {
1530 /* Immediate values are 12 bits long. */
1531 gdb_assert ((imm & 0xfffff000) == 0);
1532
1533 uint32_t unrotated_value = imm & 0xff;
1534 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1535
1536 if (rotate_amount == 0)
1537 return unrotated_value;
1538
1539 return ((unrotated_value >> rotate_amount)
1540 | (unrotated_value << (32 - rotate_amount)));
1541 }
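
/* A hedged, illustrative sketch (not part of the original file): worked
   values for arm_expand_immediate.  The 12-bit field encodes an 8-bit
   value rotated right by twice the 4-bit rotation count, as described in
   A5.2.4.  The function name is hypothetical and this is not registered
   as a real GDB selftest.  */
#if GDB_SELF_TEST
static void ATTRIBUTE_UNUSED
arm_expand_immediate_example ()
{
  SELF_CHECK (arm_expand_immediate (0x0ff) == 0x000000ff); /* no rotation */
  SELF_CHECK (arm_expand_immediate (0x4ff) == 0xff000000); /* ror by 8 */
  SELF_CHECK (arm_expand_immediate (0xf3f) == 0x000000fc); /* ror by 30 */
}
#endif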
1542
1543 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1544 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1545 fill it in. Return the first address not recognized as a prologue
1546 instruction.
1547
1548 We recognize all the instructions typically found in ARM prologues,
1549 plus harmless instructions which can be skipped (either for analysis
1550 purposes, or a more restrictive set that can be skipped when finding
1551 the end of the prologue). */
1552
1553 static CORE_ADDR
1554 arm_analyze_prologue (struct gdbarch *gdbarch,
1555 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1556 struct arm_prologue_cache *cache,
1557 const arm_instruction_reader &insn_reader)
1558 {
1559 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1560 int regno;
1561 CORE_ADDR offset, current_pc;
1562 pv_t regs[ARM_FPS_REGNUM];
1563 CORE_ADDR unrecognized_pc = 0;
1564 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1565
1566 /* Search the prologue looking for instructions that set up the
1567 frame pointer, adjust the stack pointer, and save registers.
1568
1569 Be careful, however, and if it doesn't look like a prologue,
1570 don't try to scan it. If, for instance, a frameless function
1571 begins with stmfd sp!, then we will tell ourselves there is
1572 a frame, which will confuse stack traceback, as well as "finish"
1573 and other operations that rely on a knowledge of the stack
1574 traceback. */
1575
1576 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1577 regs[regno] = pv_register (regno, 0);
1578 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1579
1580 for (current_pc = prologue_start;
1581 current_pc < prologue_end;
1582 current_pc += 4)
1583 {
1584 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1585
1586 if (insn == 0xe1a0c00d) /* mov ip, sp */
1587 {
1588 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1589 continue;
1590 }
1591 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1592 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1593 {
1594 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1595 int rd = bits (insn, 12, 15);
1596 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1597 continue;
1598 }
1599 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1600 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1601 {
1602 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1603 int rd = bits (insn, 12, 15);
1604 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1605 continue;
1606 }
1607 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1608 [sp, #-4]! */
1609 {
1610 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1611 break;
1612 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1613 stack.store (regs[ARM_SP_REGNUM], 4,
1614 regs[bits (insn, 12, 15)]);
1615 continue;
1616 }
1617 else if ((insn & 0xffff0000) == 0xe92d0000)
1618 /* stmfd sp!, {..., fp, ip, lr, pc}
1619 or
1620 stmfd sp!, {a1, a2, a3, a4} */
1621 {
1622 int mask = insn & 0xffff;
1623
1624 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1625 break;
1626
1627 /* Calculate offsets of saved registers. */
1628 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1629 if (mask & (1 << regno))
1630 {
1631 regs[ARM_SP_REGNUM]
1632 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1633 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1634 }
1635 }
1636 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1637 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1638 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1639 {
1640 /* No need to add this to saved_regs -- it's just an arg reg. */
1641 continue;
1642 }
1643 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1644 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1645 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1646 {
1647 /* No need to add this to saved_regs -- it's just an arg reg. */
1648 continue;
1649 }
1650 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1651 { registers } */
1652 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1653 {
1654 /* No need to add this to saved_regs -- it's just arg regs. */
1655 continue;
1656 }
1657 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1658 {
1659 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1660 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1661 }
1662 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1663 {
1664 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1665 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1666 }
1667 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1668 [sp, -#c]! */
1669 && tdep->have_fpa_registers)
1670 {
1671 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1672 break;
1673
1674 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1675 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1676 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1677 }
1678 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1679 [sp!] */
1680 && tdep->have_fpa_registers)
1681 {
1682 int n_saved_fp_regs;
1683 unsigned int fp_start_reg, fp_bound_reg;
1684
1685 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1686 break;
1687
1688 if ((insn & 0x800) == 0x800) /* N0 is set */
1689 {
1690 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1691 n_saved_fp_regs = 3;
1692 else
1693 n_saved_fp_regs = 1;
1694 }
1695 else
1696 {
1697 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1698 n_saved_fp_regs = 2;
1699 else
1700 n_saved_fp_regs = 4;
1701 }
1702
1703 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1704 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1705 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1706 {
1707 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1708 stack.store (regs[ARM_SP_REGNUM], 12,
1709 regs[fp_start_reg]);
1710 }
1711 }
1712 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1713 {
1714 /* Allow some special function calls when skipping the
1715 prologue; GCC generates these before storing arguments to
1716 the stack. */
1717 CORE_ADDR dest = BranchDest (current_pc, insn);
1718
1719 if (skip_prologue_function (gdbarch, dest, 0))
1720 continue;
1721 else
1722 break;
1723 }
1724 else if ((insn & 0xf0000000) != 0xe0000000)
1725 break; /* Condition not true, exit early. */
1726 else if (arm_instruction_changes_pc (insn))
1727 /* Don't scan past anything that might change control flow. */
1728 break;
1729 else if (arm_instruction_restores_sp (insn))
1730 {
1731 /* Don't scan past the epilogue. */
1732 break;
1733 }
1734 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1735 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1736 /* Ignore block loads from the stack, potentially copying
1737 parameters from memory. */
1738 continue;
1739 else if ((insn & 0xfc500000) == 0xe4100000
1740 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1741 /* Similarly ignore single loads from the stack. */
1742 continue;
1743 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1744 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1745 register instead of the stack. */
1746 continue;
1747 else
1748 {
1749 /* The optimizer might shove anything into the prologue. If we
1750 are building up the cache (cache != NULL) from scanning the
1751 prologue, we just skip what we don't recognize and scan further
1752 to make the cache as complete as possible. However, if we are
1753 only skipping the prologue, we stop immediately at the first
1754 unrecognized instruction. */
1755 unrecognized_pc = current_pc;
1756 if (cache != NULL)
1757 continue;
1758 else
1759 break;
1760 }
1761 }
1762
1763 if (unrecognized_pc == 0)
1764 unrecognized_pc = current_pc;
1765
1766 if (cache)
1767 {
1768 int framereg, framesize;
1769
1770 /* The frame size is just the distance from the frame register
1771 to the original stack pointer. */
1772 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1773 {
1774 /* Frame pointer is fp. */
1775 framereg = ARM_FP_REGNUM;
1776 framesize = -regs[ARM_FP_REGNUM].k;
1777 }
1778 else
1779 {
1780 /* Try the stack pointer... this is a bit desperate. */
1781 framereg = ARM_SP_REGNUM;
1782 framesize = -regs[ARM_SP_REGNUM].k;
1783 }
1784
1785 cache->framereg = framereg;
1786 cache->framesize = framesize;
1787
1788 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1789 if (stack.find_reg (gdbarch, regno, &offset))
1790 cache->saved_regs[regno].set_addr (offset);
1791 }
1792
1793 arm_debug_printf ("Prologue scan stopped at %s",
1794 paddress (gdbarch, unrecognized_pc));
1795
1796 return unrecognized_pc;
1797 }
1798
1799 static void
1800 arm_scan_prologue (struct frame_info *this_frame,
1801 struct arm_prologue_cache *cache)
1802 {
1803 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1805 CORE_ADDR prologue_start, prologue_end;
1806 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1807 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1808 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
1809
1810 /* Assume there is no frame until proven otherwise. */
1811 cache->framereg = ARM_SP_REGNUM;
1812 cache->framesize = 0;
1813
1814 /* Check for Thumb prologue. */
1815 if (arm_frame_is_thumb (this_frame))
1816 {
1817 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1818 return;
1819 }
1820
1821 /* Find the function prologue. If we can't find the function in
1822 the symbol table, peek in the stack frame to find the PC. */
1823 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1824 &prologue_end))
1825 {
1826 /* One way to find the end of the prologue (which works well
1827 for unoptimized code) is to do the following:
1828
1829 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1830
1831 if (sal.line == 0)
1832 prologue_end = prev_pc;
1833 else if (sal.end < prologue_end)
1834 prologue_end = sal.end;
1835
1836 This mechanism is very accurate so long as the optimizer
1837 doesn't move any instructions from the function body into the
1838 prologue. If this happens, sal.end will be the last
1839 instruction in the first hunk of prologue code just before
1840 the first instruction that the scheduler has moved from
1841 the body to the prologue.
1842
1843 In order to make sure that we scan all of the prologue
1844 instructions, we use a slightly less accurate mechanism which
1845 may scan more than necessary. To help compensate for this
1846 lack of accuracy, the prologue scanning loop below contains
1847 several clauses which'll cause the loop to terminate early if
1848 an implausible prologue instruction is encountered.
1849
1850 The expression
1851
1852 prologue_start + 64
1853
1854 is a suitable endpoint since it accounts for the largest
1855 possible prologue plus up to five instructions inserted by
1856 the scheduler. */
1857
1858 if (prologue_end > prologue_start + 64)
1859 {
1860 prologue_end = prologue_start + 64; /* See above. */
1861 }
1862 }
1863 else
1864 {
1865 /* We have no symbol information. Our only option is to assume this
1866 function has a standard stack frame and the normal frame register.
1867 Then, we can find the value of our frame pointer on entrance to
1868 the callee (or at the present moment if this is the innermost frame).
1869 The value stored there should be the address of the stmfd + 8. */
1870 CORE_ADDR frame_loc;
1871 ULONGEST return_value;
1872
1873 /* AAPCS does not use a frame register, so we can abort here. */
1874 if (tdep->arm_abi == ARM_ABI_AAPCS)
1875 return;
1876
1877 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1878 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1879 &return_value))
1880 return;
1881 else
1882 {
1883 prologue_start = gdbarch_addr_bits_remove
1884 (gdbarch, return_value) - 8;
1885 prologue_end = prologue_start + 64; /* See above. */
1886 }
1887 }
1888
1889 if (prev_pc < prologue_end)
1890 prologue_end = prev_pc;
1891
1892 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
1893 target_arm_instruction_reader ());
1894 }
1895
1896 static struct arm_prologue_cache *
1897 arm_make_prologue_cache (struct frame_info *this_frame)
1898 {
1899 int reg;
1900 struct arm_prologue_cache *cache;
1901 CORE_ADDR unwound_fp;
1902
1903 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1904 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1905
1906 arm_scan_prologue (this_frame, cache);
1907
1908 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1909 if (unwound_fp == 0)
1910 return cache;
1911
1912 cache->prev_sp = unwound_fp + cache->framesize;
1913
1914 /* Calculate actual addresses of saved registers using offsets
1915 determined by arm_scan_prologue. */
1916 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1917 if (cache->saved_regs[reg].is_addr ())
1918 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
1919 + cache->prev_sp);
1920
1921 return cache;
1922 }
1923
1924 /* Implementation of the stop_reason hook for arm_prologue frames. */
1925
1926 static enum unwind_stop_reason
1927 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1928 void **this_cache)
1929 {
1930 struct arm_prologue_cache *cache;
1931 CORE_ADDR pc;
1932
1933 if (*this_cache == NULL)
1934 *this_cache = arm_make_prologue_cache (this_frame);
1935 cache = (struct arm_prologue_cache *) *this_cache;
1936
1937 /* This is meant to halt the backtrace at "_start". */
1938 pc = get_frame_pc (this_frame);
1939 gdbarch *arch = get_frame_arch (this_frame);
1940 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (arch);
1941 if (pc <= tdep->lowest_pc)
1942 return UNWIND_OUTERMOST;
1943
1944 /* If we've hit a wall, stop. */
1945 if (cache->prev_sp == 0)
1946 return UNWIND_OUTERMOST;
1947
1948 return UNWIND_NO_REASON;
1949 }
1950
1951 /* Our frame ID for a normal frame is the current function's starting PC
1952 and the caller's SP when we were called. */
1953
1954 static void
1955 arm_prologue_this_id (struct frame_info *this_frame,
1956 void **this_cache,
1957 struct frame_id *this_id)
1958 {
1959 struct arm_prologue_cache *cache;
1960 struct frame_id id;
1961 CORE_ADDR pc, func;
1962
1963 if (*this_cache == NULL)
1964 *this_cache = arm_make_prologue_cache (this_frame);
1965 cache = (struct arm_prologue_cache *) *this_cache;
1966
1967 /* Use function start address as part of the frame ID. If we cannot
1968 identify the start address (due to missing symbol information),
1969 fall back to just using the current PC. */
1970 pc = get_frame_pc (this_frame);
1971 func = get_frame_func (this_frame);
1972 if (!func)
1973 func = pc;
1974
1975 id = frame_id_build (cache->prev_sp, func);
1976 *this_id = id;
1977 }
1978
1979 static struct value *
1980 arm_prologue_prev_register (struct frame_info *this_frame,
1981 void **this_cache,
1982 int prev_regnum)
1983 {
1984 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1985 struct arm_prologue_cache *cache;
1986
1987 if (*this_cache == NULL)
1988 *this_cache = arm_make_prologue_cache (this_frame);
1989 cache = (struct arm_prologue_cache *) *this_cache;
1990
1991 /* If we are asked to unwind the PC, then we need to return the LR
1992 instead. The prologue may save PC, but it will point into this
1993 frame's prologue, not the next frame's resume location. Also
1994 strip the saved T bit. A valid LR may have the low bit set, but
1995 a valid PC never does. */
1996 if (prev_regnum == ARM_PC_REGNUM)
1997 {
1998 CORE_ADDR lr;
1999
2000 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2001 return frame_unwind_got_constant (this_frame, prev_regnum,
2002 arm_addr_bits_remove (gdbarch, lr));
2003 }
2004
2005 /* SP is generally not saved to the stack, but this frame is
2006 identified by the next frame's stack pointer at the time of the call.
2007 The value was already reconstructed into PREV_SP. */
2008 if (prev_regnum == ARM_SP_REGNUM)
2009 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2010
2011 /* The CPSR may have been changed by the call instruction and by the
2012 called function. The only bit we can reconstruct is the T bit,
2013 by checking the low bit of LR as of the call. This is a reliable
2014 indicator of Thumb-ness except for some ARM v4T pre-interworking
2015 Thumb code, which could get away with a clear low bit as long as
2016 the called function did not use bx. Guess that all other
2017 bits are unchanged; the condition flags are presumably lost,
2018 but the processor status is likely valid. */
2019 if (prev_regnum == ARM_PS_REGNUM)
2020 {
2021 CORE_ADDR lr, cpsr;
2022 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2023
2024 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2025 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2026 if (IS_THUMB_ADDR (lr))
2027 cpsr |= t_bit;
2028 else
2029 cpsr &= ~t_bit;
2030 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2031 }
2032
2033 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2034 prev_regnum);
2035 }
2036
2037 static frame_unwind arm_prologue_unwind = {
2038 "arm prologue",
2039 NORMAL_FRAME,
2040 arm_prologue_unwind_stop_reason,
2041 arm_prologue_this_id,
2042 arm_prologue_prev_register,
2043 NULL,
2044 default_frame_sniffer
2045 };
2046
2047 /* Maintain a list of ARM exception table entries per objfile, similar to the
2048 list of mapping symbols. We only cache entries for standard ARM-defined
2049 personality routines; the cache will contain only the frame unwinding
2050 instructions associated with the entry (not the descriptors). */
2051
2052 struct arm_exidx_entry
2053 {
2054 CORE_ADDR addr;
2055 gdb_byte *entry;
2056
2057 bool operator< (const arm_exidx_entry &other) const
2058 {
2059 return addr < other.addr;
2060 }
2061 };
2062
2063 struct arm_exidx_data
2064 {
2065 std::vector<std::vector<arm_exidx_entry>> section_maps;
2066 };
2067
2068 /* Per-BFD key to store exception handling information. */
2069 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2070
2071 static struct obj_section *
2072 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2073 {
2074 struct obj_section *osect;
2075
2076 ALL_OBJFILE_OSECTIONS (objfile, osect)
2077 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2078 {
2079 bfd_vma start, size;
2080 start = bfd_section_vma (osect->the_bfd_section);
2081 size = bfd_section_size (osect->the_bfd_section);
2082
2083 if (start <= vma && vma < start + size)
2084 return osect;
2085 }
2086
2087 return NULL;
2088 }
2089
2090 /* Parse contents of exception table and exception index sections
2091 of OBJFILE, and fill in the exception table entry cache.
2092
2093 For each entry that refers to a standard ARM-defined personality
2094 routine, extract the frame unwinding instructions (from either
2095 the index or the table section). The unwinding instructions
2096 are normalized by:
2097 - extracting them from the rest of the table data
2098 - converting to host endianness
2099 - appending the implicit 0xb0 ("Finish") code
2100
2101 The extracted and normalized instructions are stored for later
2102 retrieval by the arm_find_exidx_entry routine. */
2103
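/* Illustrative example (not taken from any particular binary): a second
   index word of 0x80a8b0b0 marks a compact ("short form") entry whose
   three unwind opcode bytes are 0xa8 0xb0 0xb0 -- roughly "pop {r4, lr}"
   followed by two "finish" codes -- and the code below still appends the
   implicit trailing 0xb0 terminator. */
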
2104 static void
2105 arm_exidx_new_objfile (struct objfile *objfile)
2106 {
2107 struct arm_exidx_data *data;
2108 asection *exidx, *extab;
2109 bfd_vma exidx_vma = 0, extab_vma = 0;
2110 LONGEST i;
2111
2112 /* If we've already touched this file, do nothing. */
2113 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2114 return;
2115
2116 /* Read contents of exception table and index. */
2117 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2118 gdb::byte_vector exidx_data;
2119 if (exidx)
2120 {
2121 exidx_vma = bfd_section_vma (exidx);
2122 exidx_data.resize (bfd_section_size (exidx));
2123
2124 if (!bfd_get_section_contents (objfile->obfd, exidx,
2125 exidx_data.data (), 0,
2126 exidx_data.size ()))
2127 return;
2128 }
2129
2130 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2131 gdb::byte_vector extab_data;
2132 if (extab)
2133 {
2134 extab_vma = bfd_section_vma (extab);
2135 extab_data.resize (bfd_section_size (extab));
2136
2137 if (!bfd_get_section_contents (objfile->obfd, extab,
2138 extab_data.data (), 0,
2139 extab_data.size ()))
2140 return;
2141 }
2142
2143 /* Allocate exception table data structure. */
2144 data = arm_exidx_data_key.emplace (objfile->obfd);
2145 data->section_maps.resize (objfile->obfd->section_count);
2146
2147 /* Fill in exception table. */
2148 for (i = 0; i < exidx_data.size () / 8; i++)
2149 {
2150 struct arm_exidx_entry new_exidx_entry;
2151 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2152 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2153 exidx_data.data () + i * 8 + 4);
2154 bfd_vma addr = 0, word = 0;
2155 int n_bytes = 0, n_words = 0;
2156 struct obj_section *sec;
2157 gdb_byte *entry = NULL;
2158
2159 /* Extract address of start of function. */
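/* The first word is a prel31 offset: the mask and xor/subtract below
   sign-extend the 31-bit value, which is then taken relative to the
   address of this index entry. */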
2160 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2161 idx += exidx_vma + i * 8;
2162
2163 /* Find section containing function and compute section offset. */
2164 sec = arm_obj_section_from_vma (objfile, idx);
2165 if (sec == NULL)
2166 continue;
2167 idx -= bfd_section_vma (sec->the_bfd_section);
2168
2169 /* Determine address of exception table entry. */
2170 if (val == 1)
2171 {
2172 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2173 }
2174 else if ((val & 0xff000000) == 0x80000000)
2175 {
2176 /* Exception table entry embedded in .ARM.exidx
2177 -- must be short form. */
2178 word = val;
2179 n_bytes = 3;
2180 }
2181 else if (!(val & 0x80000000))
2182 {
2183 /* Exception table entry in .ARM.extab. */
2184 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2185 addr += exidx_vma + i * 8 + 4;
2186
2187 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2188 {
2189 word = bfd_h_get_32 (objfile->obfd,
2190 extab_data.data () + addr - extab_vma);
2191 addr += 4;
2192
2193 if ((word & 0xff000000) == 0x80000000)
2194 {
2195 /* Short form. */
2196 n_bytes = 3;
2197 }
2198 else if ((word & 0xff000000) == 0x81000000
2199 || (word & 0xff000000) == 0x82000000)
2200 {
2201 /* Long form. */
2202 n_bytes = 2;
2203 n_words = ((word >> 16) & 0xff);
2204 }
2205 else if (!(word & 0x80000000))
2206 {
2207 bfd_vma pers;
2208 struct obj_section *pers_sec;
2209 int gnu_personality = 0;
2210
2211 /* Custom personality routine. */
2212 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2213 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2214
2215 /* Check whether we've got one of the variants of the
2216 GNU personality routines. */
2217 pers_sec = arm_obj_section_from_vma (objfile, pers);
2218 if (pers_sec)
2219 {
2220 static const char *personality[] =
2221 {
2222 "__gcc_personality_v0",
2223 "__gxx_personality_v0",
2224 "__gcj_personality_v0",
2225 "__gnu_objc_personality_v0",
2226 NULL
2227 };
2228
2229 CORE_ADDR pc = pers + pers_sec->offset ();
2230 int k;
2231
2232 for (k = 0; personality[k]; k++)
2233 if (lookup_minimal_symbol_by_pc_name
2234 (pc, personality[k], objfile))
2235 {
2236 gnu_personality = 1;
2237 break;
2238 }
2239 }
2240
2241 /* If so, the next word contains a word count in the high
2242 byte, followed by the same unwind instructions as the
2243 pre-defined forms. */
2244 if (gnu_personality
2245 && addr + 4 <= extab_vma + extab_data.size ())
2246 {
2247 word = bfd_h_get_32 (objfile->obfd,
2248 (extab_data.data ()
2249 + addr - extab_vma));
2250 addr += 4;
2251 n_bytes = 3;
2252 n_words = ((word >> 24) & 0xff);
2253 }
2254 }
2255 }
2256 }
2257
2258 /* Sanity check address. */
2259 if (n_words)
2260 if (addr < extab_vma
2261 || addr + 4 * n_words > extab_vma + extab_data.size ())
2262 n_words = n_bytes = 0;
2263
2264 /* The unwind instructions reside in WORD (only the N_BYTES least
2265 significant bytes are valid), followed by N_WORDS words in the
2266 extab section starting at ADDR. */
2267 if (n_bytes || n_words)
2268 {
2269 gdb_byte *p = entry
2270 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2271 n_bytes + n_words * 4 + 1);
2272
2273 while (n_bytes--)
2274 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2275
2276 while (n_words--)
2277 {
2278 word = bfd_h_get_32 (objfile->obfd,
2279 extab_data.data () + addr - extab_vma);
2280 addr += 4;
2281
2282 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2283 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2284 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2285 *p++ = (gdb_byte) (word & 0xff);
2286 }
2287
2288 /* Implied "Finish" to terminate the list. */
2289 *p++ = 0xb0;
2290 }
2291
2292 /* Push the entry onto the vector. Entries are guaranteed to
2293 always appear in order of increasing addresses. */
2294 new_exidx_entry.addr = idx;
2295 new_exidx_entry.entry = entry;
2296 data->section_maps[sec->the_bfd_section->index].push_back
2297 (new_exidx_entry);
2298 }
2299 }
2300
2301 /* Search for the exception table entry covering MEMADDR. If one is found,
2302 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2303 set *START to the start of the region covered by this entry. */
2304
2305 static gdb_byte *
2306 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2307 {
2308 struct obj_section *sec;
2309
2310 sec = find_pc_section (memaddr);
2311 if (sec != NULL)
2312 {
2313 struct arm_exidx_data *data;
2314 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2315
2316 data = arm_exidx_data_key.get (sec->objfile->obfd);
2317 if (data != NULL)
2318 {
2319 std::vector<arm_exidx_entry> &map
2320 = data->section_maps[sec->the_bfd_section->index];
2321 if (!map.empty ())
2322 {
2323 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2324
2325 /* std::lower_bound finds the earliest ordered insertion
2326 point. If the following symbol starts at this exact
2327 address, we use that; otherwise, the preceding
2328 exception table entry covers this address. */
2329 if (idx < map.end ())
2330 {
2331 if (idx->addr == map_key.addr)
2332 {
2333 if (start)
2334 *start = idx->addr + sec->addr ();
2335 return idx->entry;
2336 }
2337 }
2338
2339 if (idx > map.begin ())
2340 {
2341 idx = idx - 1;
2342 if (start)
2343 *start = idx->addr + sec->addr ();
2344 return idx->entry;
2345 }
2346 }
2347 }
2348 }
2349
2350 return NULL;
2351 }
2352
2353 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2354 instruction list from the ARM exception table entry ENTRY, allocate and
2355 return a prologue cache structure describing how to unwind this frame.
2356
2357 Return NULL if the unwinding instruction list contains a "spare",
2358 "reserved" or "refuse to unwind" instruction as defined in section
2359 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2360 for the ARM Architecture" document. */
2361
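/* As a rough illustration of the decoder below: the opcode sequence
   0x97 0x84 0x08 0xb0 would be interpreted as "vsp = r7", then
   "pop {r7, lr}" (mask 0x408 over r4..r15), then "finish" -- a common
   shape for a frame-pointer-based epilogue. */
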
2362 static struct arm_prologue_cache *
2363 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2364 {
2365 CORE_ADDR vsp = 0;
2366 int vsp_valid = 0;
2367
2368 struct arm_prologue_cache *cache;
2369 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2370 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2371
2372 for (;;)
2373 {
2374 gdb_byte insn;
2375
2376 /* Whenever we reload SP, we have to retrieve its actual
2377 value in the current frame. */
2378 if (!vsp_valid)
2379 {
2380 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2381 {
2382 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2383 vsp = get_frame_register_unsigned (this_frame, reg);
2384 }
2385 else
2386 {
2387 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2388 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2389 }
2390
2391 vsp_valid = 1;
2392 }
2393
2394 /* Decode next unwind instruction. */
2395 insn = *entry++;
2396
2397 if ((insn & 0xc0) == 0)
2398 {
2399 int offset = insn & 0x3f;
2400 vsp += (offset << 2) + 4;
2401 }
2402 else if ((insn & 0xc0) == 0x40)
2403 {
2404 int offset = insn & 0x3f;
2405 vsp -= (offset << 2) + 4;
2406 }
2407 else if ((insn & 0xf0) == 0x80)
2408 {
2409 int mask = ((insn & 0xf) << 8) | *entry++;
2410 int i;
2411
2412 /* The special case of an all-zero mask identifies
2413 "Refuse to unwind". We return NULL to fall back
2414 to the prologue analyzer. */
2415 if (mask == 0)
2416 return NULL;
2417
2418 /* Pop registers r4..r15 under mask. */
2419 for (i = 0; i < 12; i++)
2420 if (mask & (1 << i))
2421 {
2422 cache->saved_regs[4 + i].set_addr (vsp);
2423 vsp += 4;
2424 }
2425
2426 /* Special-case popping SP -- we need to reload vsp. */
2427 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2428 vsp_valid = 0;
2429 }
2430 else if ((insn & 0xf0) == 0x90)
2431 {
2432 int reg = insn & 0xf;
2433
2434 /* Reserved cases. */
2435 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2436 return NULL;
2437
2438 /* Set SP from another register and mark VSP for reload. */
2439 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2440 vsp_valid = 0;
2441 }
2442 else if ((insn & 0xf0) == 0xa0)
2443 {
2444 int count = insn & 0x7;
2445 int pop_lr = (insn & 0x8) != 0;
2446 int i;
2447
2448 /* Pop r4..r[4+count]. */
2449 for (i = 0; i <= count; i++)
2450 {
2451 cache->saved_regs[4 + i].set_addr (vsp);
2452 vsp += 4;
2453 }
2454
2455 /* If indicated by flag, pop LR as well. */
2456 if (pop_lr)
2457 {
2458 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2459 vsp += 4;
2460 }
2461 }
2462 else if (insn == 0xb0)
2463 {
2464 /* We could only have updated PC by popping into it; if so, it
2465 will show up as an address. Otherwise, copy LR into PC. */
2466 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2467 cache->saved_regs[ARM_PC_REGNUM]
2468 = cache->saved_regs[ARM_LR_REGNUM];
2469
2470 /* We're done. */
2471 break;
2472 }
2473 else if (insn == 0xb1)
2474 {
2475 int mask = *entry++;
2476 int i;
2477
2478 /* An all-zero mask or a mask >= 16 is "spare". */
2479 if (mask == 0 || mask >= 16)
2480 return NULL;
2481
2482 /* Pop r0..r3 under mask. */
2483 for (i = 0; i < 4; i++)
2484 if (mask & (1 << i))
2485 {
2486 cache->saved_regs[i].set_addr (vsp);
2487 vsp += 4;
2488 }
2489 }
2490 else if (insn == 0xb2)
2491 {
2492 ULONGEST offset = 0;
2493 unsigned shift = 0;
2494
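/* The operand is a ULEB128-encoded value; the loop below decodes it
   and the adjustment applied is 0x204 + 4 * operand, i.e.
   "vsp = vsp + 0x204 + (uleb128 << 2)". */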
2495 do
2496 {
2497 offset |= (*entry & 0x7f) << shift;
2498 shift += 7;
2499 }
2500 while (*entry++ & 0x80);
2501
2502 vsp += 0x204 + (offset << 2);
2503 }
2504 else if (insn == 0xb3)
2505 {
2506 int start = *entry >> 4;
2507 int count = (*entry++) & 0xf;
2508 int i;
2509
2510 /* Only registers D0..D15 are valid here. */
2511 if (start + count >= 16)
2512 return NULL;
2513
2514 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2515 for (i = 0; i <= count; i++)
2516 {
2517 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2518 vsp += 8;
2519 }
2520
2521 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2522 vsp += 4;
2523 }
2524 else if ((insn & 0xf8) == 0xb8)
2525 {
2526 int count = insn & 0x7;
2527 int i;
2528
2529 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2530 for (i = 0; i <= count; i++)
2531 {
2532 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2533 vsp += 8;
2534 }
2535
2536 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2537 vsp += 4;
2538 }
2539 else if (insn == 0xc6)
2540 {
2541 int start = *entry >> 4;
2542 int count = (*entry++) & 0xf;
2543 int i;
2544
2545 /* Only registers WR0..WR15 are valid. */
2546 if (start + count >= 16)
2547 return NULL;
2548
2549 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2550 for (i = 0; i <= count; i++)
2551 {
2552 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2553 vsp += 8;
2554 }
2555 }
2556 else if (insn == 0xc7)
2557 {
2558 int mask = *entry++;
2559 int i;
2560
2561 /* An all-zero mask or a mask >= 16 is "spare". */
2562 if (mask == 0 || mask >= 16)
2563 return NULL;
2564
2565 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2566 for (i = 0; i < 4; i++)
2567 if (mask & (1 << i))
2568 {
2569 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2570 vsp += 4;
2571 }
2572 }
2573 else if ((insn & 0xf8) == 0xc0)
2574 {
2575 int count = insn & 0x7;
2576 int i;
2577
2578 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2579 for (i = 0; i <= count; i++)
2580 {
2581 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2582 vsp += 8;
2583 }
2584 }
2585 else if (insn == 0xc8)
2586 {
2587 int start = *entry >> 4;
2588 int count = (*entry++) & 0xf;
2589 int i;
2590
2591 /* Only registers D0..D31 are valid. */
2592 if (start + count >= 16)
2593 return NULL;
2594
2595 /* Pop VFP double-precision registers
2596 D[16+start]..D[16+start+count]. */
2597 for (i = 0; i <= count; i++)
2598 {
2599 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2600 vsp += 8;
2601 }
2602 }
2603 else if (insn == 0xc9)
2604 {
2605 int start = *entry >> 4;
2606 int count = (*entry++) & 0xf;
2607 int i;
2608
2609 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2610 for (i = 0; i <= count; i++)
2611 {
2612 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2613 vsp += 8;
2614 }
2615 }
2616 else if ((insn & 0xf8) == 0xd0)
2617 {
2618 int count = insn & 0x7;
2619 int i;
2620
2621 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2622 for (i = 0; i <= count; i++)
2623 {
2624 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2625 vsp += 8;
2626 }
2627 }
2628 else
2629 {
2630 /* Everything else is "spare". */
2631 return NULL;
2632 }
2633 }
2634
2635 /* If we restore SP from a register, assume this was the frame register.
2636 Otherwise just fall back to SP as frame register. */
2637 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2638 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2639 else
2640 cache->framereg = ARM_SP_REGNUM;
2641
2642 /* Determine offset to previous frame. */
2643 cache->framesize
2644 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2645
2646 /* We already got the previous SP. */
2647 cache->prev_sp = vsp;
2648
2649 return cache;
2650 }
2651
2652 /* Unwinding via ARM exception table entries. Note that the sniffer
2653 already computes a filled-in prologue cache, which is then used
2654 with the same arm_prologue_this_id and arm_prologue_prev_register
2655 routines also used for prologue-parsing based unwinding. */
2656
2657 static int
2658 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2659 struct frame_info *this_frame,
2660 void **this_prologue_cache)
2661 {
2662 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2663 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2664 CORE_ADDR addr_in_block, exidx_region, func_start;
2665 struct arm_prologue_cache *cache;
2666 gdb_byte *entry;
2667
2668 /* See if we have an ARM exception table entry covering this address. */
2669 addr_in_block = get_frame_address_in_block (this_frame);
2670 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2671 if (!entry)
2672 return 0;
2673
2674 /* The ARM exception table does not describe unwind information
2675 for arbitrary PC values, but is guaranteed to be correct only
2676 at call sites. We have to decide here whether we want to use
2677 ARM exception table information for this frame, or fall back
2678 to using prologue parsing. (Note that if we have DWARF CFI,
2679 this sniffer isn't even called -- CFI is always preferred.)
2680
2681 Before we make this decision, however, we check whether we
2682 actually have *symbol* information for the current frame.
2683 If not, prologue parsing would not work anyway, so we might
2684 as well use the exception table and hope for the best. */
2685 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2686 {
2687 int exc_valid = 0;
2688
2689 /* If the next frame is "normal", we are at a call site in this
2690 frame, so exception information is guaranteed to be valid. */
2691 if (get_next_frame (this_frame)
2692 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2693 exc_valid = 1;
2694
2695 /* We also assume exception information is valid if we're currently
2696 blocked in a system call. The system library is supposed to
2697 ensure this, so that e.g. pthread cancellation works. */
2698 if (arm_frame_is_thumb (this_frame))
2699 {
2700 ULONGEST insn;
2701
2702 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2703 2, byte_order_for_code, &insn)
2704 && (insn & 0xff00) == 0xdf00 /* svc */)
2705 exc_valid = 1;
2706 }
2707 else
2708 {
2709 ULONGEST insn;
2710
2711 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2712 4, byte_order_for_code, &insn)
2713 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2714 exc_valid = 1;
2715 }
2716
2717 /* Bail out if we don't know that exception information is valid. */
2718 if (!exc_valid)
2719 return 0;
2720
2721 /* The ARM exception index does not mark the *end* of the region
2722 covered by the entry, and some functions will not have any entry.
2723 To correctly recognize the end of the covered region, the linker
2724 should have inserted dummy records with a CANTUNWIND marker.
2725
2726 Unfortunately, current versions of GNU ld do not reliably do
2727 this, and thus we may have found an incorrect entry above.
2728 As a (temporary) sanity check, we only use the entry if it
2729 lies *within* the bounds of the function. Note that this check
2730 might reject perfectly valid entries that just happen to cover
2731 multiple functions; therefore this check ought to be removed
2732 once the linker is fixed. */
2733 if (func_start > exidx_region)
2734 return 0;
2735 }
2736
2737 /* Decode the list of unwinding instructions into a prologue cache.
2738 Note that this may fail due to e.g. a "refuse to unwind" code. */
2739 cache = arm_exidx_fill_cache (this_frame, entry);
2740 if (!cache)
2741 return 0;
2742
2743 *this_prologue_cache = cache;
2744 return 1;
2745 }
2746
2747 struct frame_unwind arm_exidx_unwind = {
2748 "arm exidx",
2749 NORMAL_FRAME,
2750 default_frame_unwind_stop_reason,
2751 arm_prologue_this_id,
2752 arm_prologue_prev_register,
2753 NULL,
2754 arm_exidx_unwind_sniffer
2755 };
2756
2757 static struct arm_prologue_cache *
2758 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2759 {
2760 struct arm_prologue_cache *cache;
2761 int reg;
2762
2763 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2764 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2765
2766 /* Still rely on the offset calculated from prologue. */
2767 arm_scan_prologue (this_frame, cache);
2768
2769 /* Since we are in epilogue, the SP has been restored. */
2770 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2771
2772 /* Calculate actual addresses of saved registers using offsets
2773 determined by arm_scan_prologue. */
2774 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2775 if (cache->saved_regs[reg].is_addr ())
2776 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2777 + cache->prev_sp);
2778
2779 return cache;
2780 }
2781
2782 /* Implementation of function hook 'this_id' in
2783 'struct frame_unwind' for the epilogue unwinder. */
2784
2785 static void
2786 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2787 void **this_cache,
2788 struct frame_id *this_id)
2789 {
2790 struct arm_prologue_cache *cache;
2791 CORE_ADDR pc, func;
2792
2793 if (*this_cache == NULL)
2794 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2795 cache = (struct arm_prologue_cache *) *this_cache;
2796
2797 /* Use function start address as part of the frame ID. If we cannot
2798 identify the start address (due to missing symbol information),
2799 fall back to just using the current PC. */
2800 pc = get_frame_pc (this_frame);
2801 func = get_frame_func (this_frame);
2802 if (func == 0)
2803 func = pc;
2804
2805 (*this_id) = frame_id_build (cache->prev_sp, pc);
2806 }
2807
2808 /* Implementation of function hook 'prev_register' in
2809 'struct frame_unwind' for the epilogue unwinder. */
2810
2811 static struct value *
2812 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2813 void **this_cache, int regnum)
2814 {
2815 if (*this_cache == NULL)
2816 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2817
2818 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2819 }
2820
2821 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2822 CORE_ADDR pc);
2823 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2824 CORE_ADDR pc);
2825
2826 /* Implementation of function hook 'sniffer' in
2827 'struct frame_unwind' for the epilogue unwinder. */
2828
2829 static int
2830 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2831 struct frame_info *this_frame,
2832 void **this_prologue_cache)
2833 {
2834 if (frame_relative_level (this_frame) == 0)
2835 {
2836 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2837 CORE_ADDR pc = get_frame_pc (this_frame);
2838
2839 if (arm_frame_is_thumb (this_frame))
2840 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2841 else
2842 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2843 }
2844 else
2845 return 0;
2846 }
2847
2848 /* Frame unwinder from epilogue. */
2849
2850 static const struct frame_unwind arm_epilogue_frame_unwind =
2851 {
2852 "arm epilogue",
2853 NORMAL_FRAME,
2854 default_frame_unwind_stop_reason,
2855 arm_epilogue_frame_this_id,
2856 arm_epilogue_frame_prev_register,
2857 NULL,
2858 arm_epilogue_frame_sniffer,
2859 };
2860
2861 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2862 trampoline, return the target PC. Otherwise return 0.
2863
2864 void call0a (char c, short s, int i, long l) {}
2865
2866 int main (void)
2867 {
2868 (*pointer_to_call0a) (c, s, i, l);
2869 }
2870
2871 Instead of calling a stub library function _call_via_xx (xx is
2872 the register name), GCC may inline the trampoline in the object
2873 file as below (register r2 has the address of call0a).
2874
2875 .global main
2876 .type main, %function
2877 ...
2878 bl .L1
2879 ...
2880 .size main, .-main
2881
2882 .L1:
2883 bx r2
2884
2885 The trampoline 'bx r2' doesn't belong to main. */
2886
2887 static CORE_ADDR
2888 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2889 {
2890 /* The heuristic for recognizing such a trampoline is that FRAME is
2891 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2892 if (arm_frame_is_thumb (frame))
2893 {
2894 gdb_byte buf[2];
2895
2896 if (target_read_memory (pc, buf, 2) == 0)
2897 {
2898 struct gdbarch *gdbarch = get_frame_arch (frame);
2899 enum bfd_endian byte_order_for_code
2900 = gdbarch_byte_order_for_code (gdbarch);
2901 uint16_t insn
2902 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2903
2904 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2905 {
2906 CORE_ADDR dest
2907 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2908
2909 /* Clear the LSB so that gdb core sets step-resume
2910 breakpoint at the right address. */
2911 return UNMAKE_THUMB_ADDR (dest);
2912 }
2913 }
2914 }
2915
2916 return 0;
2917 }
2918
2919 static struct arm_prologue_cache *
2920 arm_make_stub_cache (struct frame_info *this_frame)
2921 {
2922 struct arm_prologue_cache *cache;
2923
2924 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2925 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2926
2927 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2928
2929 return cache;
2930 }
2931
2932 /* Our frame ID for a stub frame is the current SP and PC. */
2933
2934 static void
2935 arm_stub_this_id (struct frame_info *this_frame,
2936 void **this_cache,
2937 struct frame_id *this_id)
2938 {
2939 struct arm_prologue_cache *cache;
2940
2941 if (*this_cache == NULL)
2942 *this_cache = arm_make_stub_cache (this_frame);
2943 cache = (struct arm_prologue_cache *) *this_cache;
2944
2945 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2946 }
2947
2948 static int
2949 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2950 struct frame_info *this_frame,
2951 void **this_prologue_cache)
2952 {
2953 CORE_ADDR addr_in_block;
2954 gdb_byte dummy[4];
2955 CORE_ADDR pc, start_addr;
2956 const char *name;
2957
2958 addr_in_block = get_frame_address_in_block (this_frame);
2959 pc = get_frame_pc (this_frame);
2960 if (in_plt_section (addr_in_block)
2961 /* We also use the stub unwinder if the target memory is unreadable,
2962 to avoid having the prologue unwinder try to read it. */
2963 || target_read_memory (pc, dummy, 4) != 0)
2964 return 1;
2965
2966 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2967 && arm_skip_bx_reg (this_frame, pc) != 0)
2968 return 1;
2969
2970 return 0;
2971 }
2972
2973 struct frame_unwind arm_stub_unwind = {
2974 "arm stub",
2975 NORMAL_FRAME,
2976 default_frame_unwind_stop_reason,
2977 arm_stub_this_id,
2978 arm_prologue_prev_register,
2979 NULL,
2980 arm_stub_unwind_sniffer
2981 };
2982
2983 /* Store, into CACHE->saved_regs, the addresses of the saved
2984 registers of the frame described by THIS_FRAME. CACHE is
2985 returned. */
2986
2987 static struct arm_prologue_cache *
2988 arm_m_exception_cache (struct frame_info *this_frame)
2989 {
2990 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2991 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2992 struct arm_prologue_cache *cache;
2993 CORE_ADDR lr;
2994 CORE_ADDR sp;
2995 CORE_ADDR unwound_sp;
2996 LONGEST xpsr;
2997 uint32_t exc_return;
2998 uint32_t process_stack_used;
2999 uint32_t extended_frame_used;
3000 uint32_t secure_stack_used;
3001
3002 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3003 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3004
3005 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3006 describes which bits in LR define which stack was used prior to the
3007 exception and whether the FPU is used (causing an extended stack frame). */
3008
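/* Summarizing the EXC_RETURN bits tested below: bit 2 (SPSEL) selects
   the process stack, bit 4 (FTYPE, tested inverted) selects the standard
   versus the extended (FP) frame, and bit 6 (S, ARMv8-M) selects the
   secure stack. */
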
3009 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3010 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3011
3012 /* Check EXC_RETURN indicator bits. */
3013 exc_return = (((lr >> 28) & 0xf) == 0xf);
3014
3015 /* Check EXC_RETURN bit SPSEL to see whether the Main or the Thread (process) stack was used. */
3016 process_stack_used = ((lr & (1 << 2)) != 0);
3017 if (exc_return && process_stack_used)
3018 {
3019 /* Thread (process) stack used.
3020 Potentially this could be another register defined by the target,
3021 but PSP can be considered a standard name for the "Process Stack
3022 Pointer". To be fully aware of system registers like MSP and PSP,
3023 these could be added to a separate XML arm-m-system-profile that is
3024 valid for the ARMv6-M and ARMv7-M architectures. Also, to be able
3025 to debug e.g. a corefile off-line, these registers must be defined
3026 by GDB and included in the corefile regsets. */
3027
3028 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
3029 if (psp_regnum == -1)
3030 {
3031 /* The thread (process) stack pointer could not be fetched;
3032 give a warning and exit. */
3033
3034 warning (_("no PSP thread stack unwinding supported."));
3035
3036 /* Terminate any further stack unwinding by referring to self. */
3037 cache->prev_sp = sp;
3038 return cache;
3039 }
3040 else
3041 {
3042 /* Thread (process) stack used, use PSP as SP. */
3043 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
3044 }
3045 }
3046 else
3047 {
3048 /* Main stack used, use MSP as SP. */
3049 unwound_sp = sp;
3050 }
3051
3052 /* The hardware saves eight 32-bit words, comprising xPSR,
3053 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3054 "B1.5.6 Exception entry behavior" in
3055 "ARMv7-M Architecture Reference Manual". */
3056 cache->saved_regs[0].set_addr (unwound_sp);
3057 cache->saved_regs[1].set_addr (unwound_sp + 4);
3058 cache->saved_regs[2].set_addr (unwound_sp + 8);
3059 cache->saved_regs[3].set_addr (unwound_sp + 12);
3060 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + 16);
3061 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 20);
3062 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 24);
3063 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 28);
3064
3065 /* Check EXC_RETURN bit FTYPE to see whether the extended stack frame
3066 type (FPU regs stored) is used. */
3067 extended_frame_used = ((lr & (1 << 4)) == 0);
3068 if (exc_return && extended_frame_used)
3069 {
3070 int i;
3071 int fpu_regs_stack_offset;
3072
3073 /* This code does not take lazy stacking into account; see "Lazy
3074 context save of FP state" in B1.5.7, and also ARM AN298, as
3075 supported by the Cortex-M4F architecture.
3076 To fully handle this, the FPCCR register (Floating-point Context
3077 Control Register) needs to be read out and its ASPEN and LSPEN bits
3078 checked in order to set up the lazily stacked FP registers correctly.
3079 This register is located at address 0xE000EF34. */
3080
3081 /* Extended stack frame type used. */
3082 fpu_regs_stack_offset = unwound_sp + 0x20;
3083 for (i = 0; i < 16; i++)
3084 {
3085 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3086 fpu_regs_stack_offset += 4;
3087 }
3088 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + 0x60);
3089
3090 /* Offset 0x64 is reserved. */
3091 cache->prev_sp = unwound_sp + 0x68;
3092 }
3093 else
3094 {
3095 /* Standard stack frame type used. */
3096 cache->prev_sp = unwound_sp + 0x20;
3097 }
3098
3099 /* Check EXC_RETURN bit S to see whether the Secure or the Non-secure stack was used. */
3100 secure_stack_used = ((lr & (1 << 6)) != 0);
3101 if (exc_return && secure_stack_used)
3102 {
3103 /* ARMv8-M Exception and interrupt handling is not considered here.
3104 In the ARMv8-M architecture, EXC_RETURN bit S also controls whether
3105 the Secure or the Non-secure stack was used. To separate Secure and
3106 Non-secure stacks, processors that are based on the ARMv8-M
3107 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3108 In addition, a stack limit feature is provided using stack limit
3109 registers (accessible using MSR and MRS instructions) in Privileged
3110 level. */
3111 }
3112
3113 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3114 aligner between the top of the 32-byte stack frame and the
3115 previous context's stack pointer. */
3116 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3117 && (xpsr & (1 << 9)) != 0)
3118 cache->prev_sp += 4;
3119
3120 return cache;
3121 }
3122
3123 /* Implementation of function hook 'this_id' in
3124 'struct frame_unwind'. */
3125
3126 static void
3127 arm_m_exception_this_id (struct frame_info *this_frame,
3128 void **this_cache,
3129 struct frame_id *this_id)
3130 {
3131 struct arm_prologue_cache *cache;
3132
3133 if (*this_cache == NULL)
3134 *this_cache = arm_m_exception_cache (this_frame);
3135 cache = (struct arm_prologue_cache *) *this_cache;
3136
3137 /* Our frame ID for an exception frame is the unwound SP and the current PC. */
3138 *this_id = frame_id_build (cache->prev_sp,
3139 get_frame_pc (this_frame));
3140 }
3141
3142 /* Implementation of function hook 'prev_register' in
3143 'struct frame_unwind'. */
3144
3145 static struct value *
3146 arm_m_exception_prev_register (struct frame_info *this_frame,
3147 void **this_cache,
3148 int prev_regnum)
3149 {
3150 struct arm_prologue_cache *cache;
3151
3152 if (*this_cache == NULL)
3153 *this_cache = arm_m_exception_cache (this_frame);
3154 cache = (struct arm_prologue_cache *) *this_cache;
3155
3156 /* The value was already reconstructed into PREV_SP. */
3157 if (prev_regnum == ARM_SP_REGNUM)
3158 return frame_unwind_got_constant (this_frame, prev_regnum,
3159 cache->prev_sp);
3160
3161 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3162 prev_regnum);
3163 }
3164
3165 /* Implementation of function hook 'sniffer' in
3166 'struct frame_unwind'. */
3167
3168 static int
3169 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3170 struct frame_info *this_frame,
3171 void **this_prologue_cache)
3172 {
3173 CORE_ADDR this_pc = get_frame_pc (this_frame);
3174
3175 /* No need to check is_m; this sniffer is only registered for
3176 M-profile architectures. */
3177
3178 /* Check if exception frame returns to a magic PC value. */
3179 return arm_m_addr_is_magic (this_pc);
3180 }
3181
3182 /* Frame unwinder for M-profile exceptions. */
3183
3184 struct frame_unwind arm_m_exception_unwind =
3185 {
3186 "arm m exception",
3187 SIGTRAMP_FRAME,
3188 default_frame_unwind_stop_reason,
3189 arm_m_exception_this_id,
3190 arm_m_exception_prev_register,
3191 NULL,
3192 arm_m_exception_unwind_sniffer
3193 };
3194
3195 static CORE_ADDR
3196 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3197 {
3198 struct arm_prologue_cache *cache;
3199
3200 if (*this_cache == NULL)
3201 *this_cache = arm_make_prologue_cache (this_frame);
3202 cache = (struct arm_prologue_cache *) *this_cache;
3203
3204 return cache->prev_sp - cache->framesize;
3205 }
3206
3207 struct frame_base arm_normal_base = {
3208 &arm_prologue_unwind,
3209 arm_normal_frame_base,
3210 arm_normal_frame_base,
3211 arm_normal_frame_base
3212 };
3213
3214 static struct value *
3215 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3216 int regnum)
3217 {
3218 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3219 CORE_ADDR lr, cpsr;
3220 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3221
3222 switch (regnum)
3223 {
3224 case ARM_PC_REGNUM:
3225 /* The PC is normally copied from the return column, which
3226 describes saves of LR. However, that version may have an
3227 extra bit set to indicate Thumb state. The bit is not
3228 part of the PC. */
3229 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3230 return frame_unwind_got_constant (this_frame, regnum,
3231 arm_addr_bits_remove (gdbarch, lr));
3232
3233 case ARM_PS_REGNUM:
3234 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3235 cpsr = get_frame_register_unsigned (this_frame, regnum);
3236 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3237 if (IS_THUMB_ADDR (lr))
3238 cpsr |= t_bit;
3239 else
3240 cpsr &= ~t_bit;
3241 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3242
3243 default:
3244 internal_error (__FILE__, __LINE__,
3245 _("Unexpected register %d"), regnum);
3246 }
3247 }
3248
3249 static void
3250 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3251 struct dwarf2_frame_state_reg *reg,
3252 struct frame_info *this_frame)
3253 {
3254 switch (regnum)
3255 {
3256 case ARM_PC_REGNUM:
3257 case ARM_PS_REGNUM:
3258 reg->how = DWARF2_FRAME_REG_FN;
3259 reg->loc.fn = arm_dwarf2_prev_register;
3260 break;
3261 case ARM_SP_REGNUM:
3262 reg->how = DWARF2_FRAME_REG_CFA;
3263 break;
3264 }
3265 }
3266
3267 /* Implement the stack_frame_destroyed_p gdbarch method. */
3268
3269 static int
3270 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3271 {
3272 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3273 unsigned int insn, insn2;
3274 int found_return = 0, found_stack_adjust = 0;
3275 CORE_ADDR func_start, func_end;
3276 CORE_ADDR scan_pc;
3277 gdb_byte buf[4];
3278
3279 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3280 return 0;
3281
3282 /* The epilogue is a sequence of instructions along the following lines:
3283
3284 - add stack frame size to SP or FP
3285 - [if frame pointer used] restore SP from FP
3286 - restore registers from SP [may include PC]
3287 - a return-type instruction [if PC wasn't already restored]
3288
3289 In a first pass, we scan forward from the current PC and verify the
3290 instructions we find as compatible with this sequence, ending in a
3291 return instruction.
3292
3293 However, this is not sufficient to distinguish indirect function calls
3294 within a function from indirect tail calls in the epilogue in some cases.
3295 Therefore, if we didn't already find any SP-changing instruction during
3296 forward scan, we add a backward scanning heuristic to ensure we actually
3297 are in the epilogue. */
3298
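/* For instance (illustrative): "add sp, #16" followed by
   "pop {r4, r7, pc}" is a typical Thumb epilogue -- the pop that
   includes PC is what the forward scan below accepts as the return. */
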
3299 scan_pc = pc;
3300 while (scan_pc < func_end && !found_return)
3301 {
3302 if (target_read_memory (scan_pc, buf, 2))
3303 break;
3304
3305 scan_pc += 2;
3306 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3307
3308 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3309 found_return = 1;
3310 else if (insn == 0x46f7) /* mov pc, lr */
3311 found_return = 1;
3312 else if (thumb_instruction_restores_sp (insn))
3313 {
3314 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3315 found_return = 1;
3316 }
3317 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3318 {
3319 if (target_read_memory (scan_pc, buf, 2))
3320 break;
3321
3322 scan_pc += 2;
3323 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3324
3325 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3326 {
3327 if (insn2 & 0x8000) /* <registers> include PC. */
3328 found_return = 1;
3329 }
3330 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3331 && (insn2 & 0x0fff) == 0x0b04)
3332 {
3333 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3334 found_return = 1;
3335 }
3336 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3337 && (insn2 & 0x0e00) == 0x0a00)
3338 ;
3339 else
3340 break;
3341 }
3342 else
3343 break;
3344 }
3345
3346 if (!found_return)
3347 return 0;
3348
3349 /* Since any instruction in the epilogue sequence, with the possible
3350 exception of return itself, updates the stack pointer, we need to
3351 scan backwards for at most one instruction. Try either a 16-bit or
3352 a 32-bit instruction. This is just a heuristic, so we do not worry
3353 too much about false positives. */
3354
3355 if (pc - 4 < func_start)
3356 return 0;
3357 if (target_read_memory (pc - 4, buf, 4))
3358 return 0;
3359
3360 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3361 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3362
3363 if (thumb_instruction_restores_sp (insn2))
3364 found_stack_adjust = 1;
3365 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3366 found_stack_adjust = 1;
3367 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3368 && (insn2 & 0x0fff) == 0x0b04)
3369 found_stack_adjust = 1;
3370 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3371 && (insn2 & 0x0e00) == 0x0a00)
3372 found_stack_adjust = 1;
3373
3374 return found_stack_adjust;
3375 }
3376
3377 static int
3378 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3379 {
3380 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3381 unsigned int insn;
3382 int found_return;
3383 CORE_ADDR func_start, func_end;
3384
3385 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3386 return 0;
3387
3388 /* We are in the epilogue if the previous instruction was a stack
3389 adjustment and the next instruction is a possible return (bx, mov
3390 pc, or pop). We could have to scan backwards to find the stack
3391 adjustment, or forwards to find the return, but this is a decent
3392 approximation. First scan forwards. */
3393
3394 found_return = 0;
3395 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3396 if (bits (insn, 28, 31) != INST_NV)
3397 {
3398 if ((insn & 0x0ffffff0) == 0x012fff10)
3399 /* BX. */
3400 found_return = 1;
3401 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3402 /* MOV PC. */
3403 found_return = 1;
3404 else if ((insn & 0x0fff0000) == 0x08bd0000
3405 && (insn & 0x0000c000) != 0)
3406 /* POP (LDMIA), including PC or LR. */
3407 found_return = 1;
3408 }
3409
3410 if (!found_return)
3411 return 0;
3412
3413 /* Scan backwards. This is just a heuristic, so do not worry about
3414 false positives from mode changes. */
3415
3416 if (pc < func_start + 4)
3417 return 0;
3418
3419 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3420 if (arm_instruction_restores_sp (insn))
3421 return 1;
3422
3423 return 0;
3424 }
3425
3426 /* Implement the stack_frame_destroyed_p gdbarch method. */
3427
3428 static int
3429 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3430 {
3431 if (arm_pc_is_thumb (gdbarch, pc))
3432 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3433 else
3434 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3435 }
3436
3437 /* When arguments must be pushed onto the stack, they go on in reverse
3438 order. The code below implements a FILO (stack) to do this. */
3439
3440 struct stack_item
3441 {
3442 int len;
3443 struct stack_item *prev;
3444 gdb_byte *data;
3445 };
3446
3447 static struct stack_item *
3448 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3449 {
3450 struct stack_item *si;
3451 si = XNEW (struct stack_item);
3452 si->data = (gdb_byte *) xmalloc (len);
3453 si->len = len;
3454 si->prev = prev;
3455 memcpy (si->data, contents, len);
3456 return si;
3457 }
3458
3459 static struct stack_item *
3460 pop_stack_item (struct stack_item *si)
3461 {
3462 struct stack_item *dead = si;
3463 si = si->prev;
3464 xfree (dead->data);
3465 xfree (dead);
3466 return si;
3467 }
3468
3469 /* Implement the gdbarch type alignment method, overrides the generic
3470 alignment algorithm for anything that is arm specific. */
3471
3472 static ULONGEST
3473 arm_type_align (gdbarch *gdbarch, struct type *t)
3474 {
3475 t = check_typedef (t);
3476 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3477 {
3478 /* Use the natural alignment for vector types (the same as for
3479 scalar types), but the maximum alignment is 64 bits. */
3480 if (TYPE_LENGTH (t) > 8)
3481 return 8;
3482 else
3483 return TYPE_LENGTH (t);
3484 }
3485
3486 /* Allow the common code to calculate the alignment. */
3487 return 0;
3488 }
3489
3490 /* Possible base types for a candidate for passing and returning in
3491 VFP registers. */
3492
3493 enum arm_vfp_cprc_base_type
3494 {
3495 VFP_CPRC_UNKNOWN,
3496 VFP_CPRC_SINGLE,
3497 VFP_CPRC_DOUBLE,
3498 VFP_CPRC_VEC64,
3499 VFP_CPRC_VEC128
3500 };
3501
3502 /* The length of one element of base type B. */
3503
3504 static unsigned
3505 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3506 {
3507 switch (b)
3508 {
3509 case VFP_CPRC_SINGLE:
3510 return 4;
3511 case VFP_CPRC_DOUBLE:
3512 return 8;
3513 case VFP_CPRC_VEC64:
3514 return 8;
3515 case VFP_CPRC_VEC128:
3516 return 16;
3517 default:
3518 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3519 (int) b);
3520 }
3521 }
3522
3523 /* The character ('s', 'd' or 'q') for the type of VFP register used
3524 for passing base type B. */
3525
3526 static int
3527 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3528 {
3529 switch (b)
3530 {
3531 case VFP_CPRC_SINGLE:
3532 return 's';
3533 case VFP_CPRC_DOUBLE:
3534 return 'd';
3535 case VFP_CPRC_VEC64:
3536 return 'd';
3537 case VFP_CPRC_VEC128:
3538 return 'q';
3539 default:
3540 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3541 (int) b);
3542 }
3543 }
3544
3545 /* Determine whether T may be part of a candidate for passing and
3546 returning in VFP registers, ignoring the limit on the total number
3547 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3548 classification of the first valid component found; if it is not
3549 VFP_CPRC_UNKNOWN, all components must have the same classification
3550 as *BASE_TYPE. If it is found that T contains a type not permitted
3551 for passing and returning in VFP registers, a type differently
3552 classified from *BASE_TYPE, or two types differently classified
3553 from each other, return -1, otherwise return the total number of
3554 base-type elements found (possibly 0 in an empty structure or
3555 array). Vector types are not currently supported, matching the
3556 generic AAPCS support. */
3557
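/* For example (illustrative): "struct { float x, y; }" yields two
   VFP_CPRC_SINGLE elements, "double d[3]" yields three VFP_CPRC_DOUBLE
   elements, while "struct { float f; double d; }" mixes classifications
   and is rejected with -1. */
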
3558 static int
3559 arm_vfp_cprc_sub_candidate (struct type *t,
3560 enum arm_vfp_cprc_base_type *base_type)
3561 {
3562 t = check_typedef (t);
3563 switch (t->code ())
3564 {
3565 case TYPE_CODE_FLT:
3566 switch (TYPE_LENGTH (t))
3567 {
3568 case 4:
3569 if (*base_type == VFP_CPRC_UNKNOWN)
3570 *base_type = VFP_CPRC_SINGLE;
3571 else if (*base_type != VFP_CPRC_SINGLE)
3572 return -1;
3573 return 1;
3574
3575 case 8:
3576 if (*base_type == VFP_CPRC_UNKNOWN)
3577 *base_type = VFP_CPRC_DOUBLE;
3578 else if (*base_type != VFP_CPRC_DOUBLE)
3579 return -1;
3580 return 1;
3581
3582 default:
3583 return -1;
3584 }
3585 break;
3586
3587 case TYPE_CODE_COMPLEX:
3588 /* Arguments of complex T where T is one of the types float or
3589 double get treated as if they are implemented as:
3590
3591 struct complexT
3592 {
3593 T real;
3594 T imag;
3595 };
3596
3597 */
3598 switch (TYPE_LENGTH (t))
3599 {
3600 case 8:
3601 if (*base_type == VFP_CPRC_UNKNOWN)
3602 *base_type = VFP_CPRC_SINGLE;
3603 else if (*base_type != VFP_CPRC_SINGLE)
3604 return -1;
3605 return 2;
3606
3607 case 16:
3608 if (*base_type == VFP_CPRC_UNKNOWN)
3609 *base_type = VFP_CPRC_DOUBLE;
3610 else if (*base_type != VFP_CPRC_DOUBLE)
3611 return -1;
3612 return 2;
3613
3614 default:
3615 return -1;
3616 }
3617 break;
3618
3619 case TYPE_CODE_ARRAY:
3620 {
3621 if (t->is_vector ())
3622 {
3623 /* A 64-bit or 128-bit containerized vector type is a VFP
3624 CPRC. */
3625 switch (TYPE_LENGTH (t))
3626 {
3627 case 8:
3628 if (*base_type == VFP_CPRC_UNKNOWN)
3629 *base_type = VFP_CPRC_VEC64;
3630 return 1;
3631 case 16:
3632 if (*base_type == VFP_CPRC_UNKNOWN)
3633 *base_type = VFP_CPRC_VEC128;
3634 return 1;
3635 default:
3636 return -1;
3637 }
3638 }
3639 else
3640 {
3641 int count;
3642 unsigned unitlen;
3643
3644 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3645 base_type);
3646 if (count == -1)
3647 return -1;
3648 if (TYPE_LENGTH (t) == 0)
3649 {
3650 gdb_assert (count == 0);
3651 return 0;
3652 }
3653 else if (count == 0)
3654 return -1;
3655 unitlen = arm_vfp_cprc_unit_length (*base_type);
3656 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3657 return TYPE_LENGTH (t) / unitlen;
3658 }
3659 }
3660 break;
3661
3662 case TYPE_CODE_STRUCT:
3663 {
3664 int count = 0;
3665 unsigned unitlen;
3666 int i;
3667 for (i = 0; i < t->num_fields (); i++)
3668 {
3669 int sub_count = 0;
3670
3671 if (!field_is_static (&t->field (i)))
3672 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3673 base_type);
3674 if (sub_count == -1)
3675 return -1;
3676 count += sub_count;
3677 }
3678 if (TYPE_LENGTH (t) == 0)
3679 {
3680 gdb_assert (count == 0);
3681 return 0;
3682 }
3683 else if (count == 0)
3684 return -1;
3685 unitlen = arm_vfp_cprc_unit_length (*base_type);
3686 if (TYPE_LENGTH (t) != unitlen * count)
3687 return -1;
3688 return count;
3689 }
3690
3691 case TYPE_CODE_UNION:
3692 {
3693 int count = 0;
3694 unsigned unitlen;
3695 int i;
3696 for (i = 0; i < t->num_fields (); i++)
3697 {
3698 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3699 base_type);
3700 if (sub_count == -1)
3701 return -1;
3702 count = (count > sub_count ? count : sub_count);
3703 }
3704 if (TYPE_LENGTH (t) == 0)
3705 {
3706 gdb_assert (count == 0);
3707 return 0;
3708 }
3709 else if (count == 0)
3710 return -1;
3711 unitlen = arm_vfp_cprc_unit_length (*base_type);
3712 if (TYPE_LENGTH (t) != unitlen * count)
3713 return -1;
3714 return count;
3715 }
3716
3717 default:
3718 break;
3719 }
3720
3721 return -1;
3722 }
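/* For example, under the rules above a structure such as
   struct { double re; double im; }
   classifies as two VFP_CPRC_DOUBLE elements, while
   struct { float f; double d; }
   is rejected (-1) because its members have different base types. */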
3723
3724 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3725 if passed to or returned from a non-variadic function with the VFP
3726 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3727 *BASE_TYPE to the base type for T and *COUNT to the number of
3728 elements of that base type before returning. */
3729
3730 static int
3731 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3732 int *count)
3733 {
3734 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3735 int c = arm_vfp_cprc_sub_candidate (t, &b);
3736 if (c <= 0 || c > 4)
3737 return 0;
3738 *base_type = b;
3739 *count = c;
3740 return 1;
3741 }
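/* For example, struct { float x, y, z; } is a CPRC with base type
   VFP_CPRC_SINGLE and a count of 3, while struct { double d[5]; } is
   not a CPRC because it contains more than four elements. */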
3742
3743 /* Return 1 if the VFP ABI should be used for passing arguments to and
3744 returning values from a function of type FUNC_TYPE, 0
3745 otherwise. */
3746
3747 static int
3748 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3749 {
3750 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3751
3752 /* Variadic functions always use the base ABI. Assume that functions
3753 without debug info are not variadic. */
3754 if (func_type && check_typedef (func_type)->has_varargs ())
3755 return 0;
3756
3757 /* The VFP ABI is only supported as a variant of AAPCS. */
3758 if (tdep->arm_abi != ARM_ABI_AAPCS)
3759 return 0;
3760
3761 return tdep->fp_model == ARM_FLOAT_VFP;
3762 }
3763
3764 /* We currently only support passing parameters in integer registers (which
3765 conforms to GCC's default model) and in VFP registers following the VFP
3766 variant of AAPCS. Several other variants exist and we should probably
3767 support some of them based on the selected ABI. */
3768
3769 static CORE_ADDR
3770 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3771 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3772 struct value **args, CORE_ADDR sp,
3773 function_call_return_method return_method,
3774 CORE_ADDR struct_addr)
3775 {
3776 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3777 int argnum;
3778 int argreg;
3779 int nstack;
3780 struct stack_item *si = NULL;
3781 int use_vfp_abi;
3782 struct type *ftype;
3783 unsigned vfp_regs_free = (1 << 16) - 1;
3784 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
3785
3786 /* Determine the type of this function and whether the VFP ABI
3787 applies. */
3788 ftype = check_typedef (value_type (function));
3789 if (ftype->code () == TYPE_CODE_PTR)
3790 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3791 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3792
3793 /* Set the return address. For the ARM, the return breakpoint is
3794 always at BP_ADDR. */
3795 if (arm_pc_is_thumb (gdbarch, bp_addr))
3796 bp_addr |= 1;
3797 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3798
3799 /* Walk through the list of args and determine how large a temporary
3800 stack is required. Need to take care here as structs may be
3801 passed on the stack, and we have to push them. */
3802 nstack = 0;
3803
3804 argreg = ARM_A1_REGNUM;
3805 nstack = 0;
3806
3807 /* The struct_return pointer occupies the first parameter
3808 passing register. */
3809 if (return_method == return_method_struct)
3810 {
3811 arm_debug_printf ("struct return in %s = %s",
3812 gdbarch_register_name (gdbarch, argreg),
3813 paddress (gdbarch, struct_addr));
3814
3815 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3816 argreg++;
3817 }
3818
3819 for (argnum = 0; argnum < nargs; argnum++)
3820 {
3821 int len;
3822 struct type *arg_type;
3823 struct type *target_type;
3824 enum type_code typecode;
3825 const bfd_byte *val;
3826 int align;
3827 enum arm_vfp_cprc_base_type vfp_base_type;
3828 int vfp_base_count;
3829 int may_use_core_reg = 1;
3830
3831 arg_type = check_typedef (value_type (args[argnum]));
3832 len = TYPE_LENGTH (arg_type);
3833 target_type = TYPE_TARGET_TYPE (arg_type);
3834 typecode = arg_type->code ();
3835 val = value_contents (args[argnum]).data ();
3836
3837 align = type_align (arg_type);
3838 /* Round alignment up to a whole number of words. */
3839 align = (align + ARM_INT_REGISTER_SIZE - 1)
3840 & ~(ARM_INT_REGISTER_SIZE - 1);
3841 /* Different ABIs have different maximum alignments. */
3842 if (tdep->arm_abi == ARM_ABI_APCS)
3843 {
3844 /* The APCS ABI only requires word alignment. */
3845 align = ARM_INT_REGISTER_SIZE;
3846 }
3847 else
3848 {
3849 /* The AAPCS requires at most doubleword alignment. */
3850 if (align > ARM_INT_REGISTER_SIZE * 2)
3851 align = ARM_INT_REGISTER_SIZE * 2;
3852 }
3853
3854 if (use_vfp_abi
3855 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3856 &vfp_base_count))
3857 {
3858 int regno;
3859 int unit_length;
3860 int shift;
3861 unsigned mask;
3862
3863 /* Because this is a CPRC it cannot go in a core register or
3864 cause a core register to be skipped for alignment.
3865 Either it goes in VFP registers and the rest of this loop
3866 iteration is skipped for this argument, or it goes on the
3867 stack (and the stack alignment code is correct for this
3868 case). */
3869 may_use_core_reg = 0;
3870
3871 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3872 shift = unit_length / 4;
3873 mask = (1 << (shift * vfp_base_count)) - 1;
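/* For example, a CPRC made of two doubles has a unit length of 8, so
   SHIFT is 2 and MASK covers four single-precision register slots; the
   loop below then looks for a correspondingly aligned run of free
   slots in VFP_REGS_FREE. */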
3874 for (regno = 0; regno < 16; regno += shift)
3875 if (((vfp_regs_free >> regno) & mask) == mask)
3876 break;
3877
3878 if (regno < 16)
3879 {
3880 int reg_char;
3881 int reg_scaled;
3882 int i;
3883
3884 vfp_regs_free &= ~(mask << regno);
3885 reg_scaled = regno / shift;
3886 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3887 for (i = 0; i < vfp_base_count; i++)
3888 {
3889 char name_buf[4];
3890 int regnum;
3891 if (reg_char == 'q')
3892 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3893 val + i * unit_length);
3894 else
3895 {
3896 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3897 reg_char, reg_scaled + i);
3898 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3899 strlen (name_buf));
3900 regcache->cooked_write (regnum, val + i * unit_length);
3901 }
3902 }
3903 continue;
3904 }
3905 else
3906 {
3907 /* This CPRC could not go in VFP registers, so all VFP
3908 registers are now marked as used. */
3909 vfp_regs_free = 0;
3910 }
3911 }
3912
3913 /* Push stack padding for doubleword alignment. */
3914 if (nstack & (align - 1))
3915 {
3916 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3917 nstack += ARM_INT_REGISTER_SIZE;
3918 }
3919
3920 /* Doubleword aligned quantities must go in even register pairs. */
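/* For example, under AAPCS a doubleword-aligned argument passed in
   core registers goes in r0/r1 or r2/r3, never in r1/r2. */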
3921 if (may_use_core_reg
3922 && argreg <= ARM_LAST_ARG_REGNUM
3923 && align > ARM_INT_REGISTER_SIZE
3924 && argreg & 1)
3925 argreg++;
3926
3927 /* If the argument is a pointer to a function, and it is a
3928 Thumb function, create a LOCAL copy of the value and set
3929 the THUMB bit in it. */
3930 if (TYPE_CODE_PTR == typecode
3931 && target_type != NULL
3932 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3933 {
3934 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3935 if (arm_pc_is_thumb (gdbarch, regval))
3936 {
3937 bfd_byte *copy = (bfd_byte *) alloca (len);
3938 store_unsigned_integer (copy, len, byte_order,
3939 MAKE_THUMB_ADDR (regval));
3940 val = copy;
3941 }
3942 }
3943
3944 /* Copy the argument to general registers or the stack in
3945 register-sized pieces. Large arguments are split between
3946 registers and stack. */
3947 while (len > 0)
3948 {
3949 int partial_len = len < ARM_INT_REGISTER_SIZE
3950 ? len : ARM_INT_REGISTER_SIZE;
3951 CORE_ADDR regval
3952 = extract_unsigned_integer (val, partial_len, byte_order);
3953
3954 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3955 {
3956 /* The argument is being passed in a general purpose
3957 register. */
3958 if (byte_order == BFD_ENDIAN_BIG)
3959 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3960
3961 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
3962 gdbarch_register_name (gdbarch, argreg),
3963 phex (regval, ARM_INT_REGISTER_SIZE));
3964
3965 regcache_cooked_write_unsigned (regcache, argreg, regval);
3966 argreg++;
3967 }
3968 else
3969 {
3970 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3971
3972 memset (buf, 0, sizeof (buf));
3973 store_unsigned_integer (buf, partial_len, byte_order, regval);
3974
3975 /* Push the arguments onto the stack. */
3976 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
3977 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3978 nstack += ARM_INT_REGISTER_SIZE;
3979 }
3980
3981 len -= partial_len;
3982 val += partial_len;
3983 }
3984 }
3985 /* If we have an odd number of words to push, then decrement the stack
3986 by one word now, so that the first stack argument will be dword aligned. */
3987 if (nstack & 4)
3988 sp -= 4;
3989
3990 while (si)
3991 {
3992 sp -= si->len;
3993 write_memory (sp, si->data, si->len);
3994 si = pop_stack_item (si);
3995 }
3996
3997 /* Finally, update the SP register. */
3998 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3999
4000 return sp;
4001 }
4002
4003
4004 /* Always align the frame to an 8-byte boundary. This is required on
4005 some platforms and harmless on the rest. */
4006
4007 static CORE_ADDR
4008 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4009 {
4010 /* Align the stack to eight bytes. */
4011 return sp & ~ (CORE_ADDR) 7;
4012 }
4013
4014 static void
4015 print_fpu_flags (struct ui_file *file, int flags)
4016 {
4017 if (flags & (1 << 0))
4018 fputs_filtered ("IVO ", file);
4019 if (flags & (1 << 1))
4020 fputs_filtered ("DVZ ", file);
4021 if (flags & (1 << 2))
4022 fputs_filtered ("OFL ", file);
4023 if (flags & (1 << 3))
4024 fputs_filtered ("UFL ", file);
4025 if (flags & (1 << 4))
4026 fputs_filtered ("INX ", file);
4027 fputc_filtered ('\n', file);
4028 }
4029
4030 /* Print interesting information about the floating point processor
4031 (if present) or emulator. */
4032 static void
4033 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4034 struct frame_info *frame, const char *args)
4035 {
4036 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4037 int type;
4038
4039 type = (status >> 24) & 127;
4040 if (status & (1 << 31))
4041 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4042 else
4043 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4044 /* i18n: [floating point unit] mask */
4045 fputs_filtered (_("mask: "), file);
4046 print_fpu_flags (file, status >> 16);
4047 /* i18n: [floating point unit] flags */
4048 fputs_filtered (_("flags: "), file);
4049 print_fpu_flags (file, status);
4050 }
4051
4052 /* Construct the ARM extended floating point type. */
4053 static struct type *
4054 arm_ext_type (struct gdbarch *gdbarch)
4055 {
4056 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4057
4058 if (!tdep->arm_ext_type)
4059 tdep->arm_ext_type
4060 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4061 floatformats_arm_ext);
4062
4063 return tdep->arm_ext_type;
4064 }
4065
4066 static struct type *
4067 arm_neon_double_type (struct gdbarch *gdbarch)
4068 {
4069 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4070
4071 if (tdep->neon_double_type == NULL)
4072 {
4073 struct type *t, *elem;
4074
4075 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4076 TYPE_CODE_UNION);
4077 elem = builtin_type (gdbarch)->builtin_uint8;
4078 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4079 elem = builtin_type (gdbarch)->builtin_uint16;
4080 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4081 elem = builtin_type (gdbarch)->builtin_uint32;
4082 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4083 elem = builtin_type (gdbarch)->builtin_uint64;
4084 append_composite_type_field (t, "u64", elem);
4085 elem = builtin_type (gdbarch)->builtin_float;
4086 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4087 elem = builtin_type (gdbarch)->builtin_double;
4088 append_composite_type_field (t, "f64", elem);
4089
4090 t->set_is_vector (true);
4091 t->set_name ("neon_d");
4092 tdep->neon_double_type = t;
4093 }
4094
4095 return tdep->neon_double_type;
4096 }
4097
4098 /* FIXME: The vector types are not correctly ordered on big-endian
4099 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4100 bits of d0 - regardless of what unit size is being held in d0. So
4101 the offset of the first uint8 in d0 is 7, but the offset of the
4102 first float is 4. This code works as-is for little-endian
4103 targets. */
4104
4105 static struct type *
4106 arm_neon_quad_type (struct gdbarch *gdbarch)
4107 {
4108 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4109
4110 if (tdep->neon_quad_type == NULL)
4111 {
4112 struct type *t, *elem;
4113
4114 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4115 TYPE_CODE_UNION);
4116 elem = builtin_type (gdbarch)->builtin_uint8;
4117 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4118 elem = builtin_type (gdbarch)->builtin_uint16;
4119 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4120 elem = builtin_type (gdbarch)->builtin_uint32;
4121 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4122 elem = builtin_type (gdbarch)->builtin_uint64;
4123 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4124 elem = builtin_type (gdbarch)->builtin_float;
4125 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4126 elem = builtin_type (gdbarch)->builtin_double;
4127 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4128
4129 t->set_is_vector (true);
4130 t->set_name ("neon_q");
4131 tdep->neon_quad_type = t;
4132 }
4133
4134 return tdep->neon_quad_type;
4135 }
4136
4137 /* Return true if REGNUM is a Q pseudo register. Return false
4138 otherwise.
4139
4140 REGNUM is the raw register number and not a pseudo-relative register
4141 number. */
4142
4143 static bool
4144 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4145 {
4146 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4147
4148 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4149 MVE (Q0~Q7) features. */
4150 if (tdep->have_q_pseudos
4151 && regnum >= tdep->q_pseudo_base
4152 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
4153 return true;
4154
4155 return false;
4156 }
4157
4158 /* Return true if REGNUM is a VFP S pseudo register. Return false
4159 otherwise.
4160
4161 REGNUM is the raw register number and not a pseudo-relative register
4162 number. */
4163
4164 static bool
4165 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
4166 {
4167 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4168
4169 if (tdep->have_s_pseudos
4170 && regnum >= tdep->s_pseudo_base
4171 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
4172 return true;
4173
4174 return false;
4175 }
4176
4177 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
4178 otherwise.
4179
4180 REGNUM is the raw register number and not a pseudo-relative register
4181 number. */
4182
4183 static bool
4184 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
4185 {
4186 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4187
4188 if (tdep->have_mve
4189 && regnum >= tdep->mve_pseudo_base
4190 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
4191 return true;
4192
4193 return false;
4194 }
4195
4196 /* Return the GDB type object for the "standard" data type of data in
4197 register N. */
4198
4199 static struct type *
4200 arm_register_type (struct gdbarch *gdbarch, int regnum)
4201 {
4202 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4203
4204 if (is_s_pseudo (gdbarch, regnum))
4205 return builtin_type (gdbarch)->builtin_float;
4206
4207 if (is_q_pseudo (gdbarch, regnum))
4208 return arm_neon_quad_type (gdbarch);
4209
4210 if (is_mve_pseudo (gdbarch, regnum))
4211 return builtin_type (gdbarch)->builtin_int16;
4212
4213 /* If the target description has register information, we are only
4214 in this function so that we can override the types of
4215 double-precision registers for NEON. */
4216 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4217 {
4218 struct type *t = tdesc_register_type (gdbarch, regnum);
4219
4220 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4221 && t->code () == TYPE_CODE_FLT
4222 && tdep->have_neon)
4223 return arm_neon_double_type (gdbarch);
4224 else
4225 return t;
4226 }
4227
4228 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4229 {
4230 if (!tdep->have_fpa_registers)
4231 return builtin_type (gdbarch)->builtin_void;
4232
4233 return arm_ext_type (gdbarch);
4234 }
4235 else if (regnum == ARM_SP_REGNUM)
4236 return builtin_type (gdbarch)->builtin_data_ptr;
4237 else if (regnum == ARM_PC_REGNUM)
4238 return builtin_type (gdbarch)->builtin_func_ptr;
4239 else if (regnum >= ARRAY_SIZE (arm_register_names))
4240 /* These registers are only supported on targets which supply
4241 an XML description. */
4242 return builtin_type (gdbarch)->builtin_int0;
4243 else
4244 return builtin_type (gdbarch)->builtin_uint32;
4245 }
4246
4247 /* Map a DWARF register REGNUM onto the appropriate GDB register
4248 number. */
4249
4250 static int
4251 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4252 {
4253 /* Core integer regs. */
4254 if (reg >= 0 && reg <= 15)
4255 return reg;
4256
4257 /* Legacy FPA encoding. These were once used in a way which
4258 overlapped with VFP register numbering, so their use is
4259 discouraged, but GDB doesn't support the ARM toolchain
4260 which used them for VFP. */
4261 if (reg >= 16 && reg <= 23)
4262 return ARM_F0_REGNUM + reg - 16;
4263
4264 /* New assignments for the FPA registers. */
4265 if (reg >= 96 && reg <= 103)
4266 return ARM_F0_REGNUM + reg - 96;
4267
4268 /* WMMX register assignments. */
4269 if (reg >= 104 && reg <= 111)
4270 return ARM_WCGR0_REGNUM + reg - 104;
4271
4272 if (reg >= 112 && reg <= 127)
4273 return ARM_WR0_REGNUM + reg - 112;
4274
4275 if (reg >= 192 && reg <= 199)
4276 return ARM_WC0_REGNUM + reg - 192;
4277
4278 /* VFP v2 registers. A double precision value is actually
4279 in d1 rather than s2, but the ABI only defines numbering
4280 for the single precision registers. This will "just work"
4281 in GDB for little endian targets (we'll read eight bytes,
4282 starting in s0 and then progressing to s1), but will be
4283 reversed on big endian targets with VFP. This won't
4284 be a problem for the new Neon quad registers; you're supposed
4285 to use DW_OP_piece for those. */
4286 if (reg >= 64 && reg <= 95)
4287 {
4288 char name_buf[4];
4289
4290 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4291 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4292 strlen (name_buf));
4293 }
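/* For example, DWARF register 66 maps to "s2" above, and DWARF
   register 258 maps to "d2" below. */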
4294
4295 /* VFP v3 / Neon registers. This range is also used for VFP v2
4296 registers, except that it now describes d0 instead of s0. */
4297 if (reg >= 256 && reg <= 287)
4298 {
4299 char name_buf[4];
4300
4301 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4302 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4303 strlen (name_buf));
4304 }
4305
4306 return -1;
4307 }
4308
4309 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4310 static int
4311 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4312 {
4313 int reg = regnum;
4314 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4315
4316 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4317 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4318
4319 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4320 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4321
4322 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4323 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4324
4325 if (reg < NUM_GREGS)
4326 return SIM_ARM_R0_REGNUM + reg;
4327 reg -= NUM_GREGS;
4328
4329 if (reg < NUM_FREGS)
4330 return SIM_ARM_FP0_REGNUM + reg;
4331 reg -= NUM_FREGS;
4332
4333 if (reg < NUM_SREGS)
4334 return SIM_ARM_FPS_REGNUM + reg;
4335 reg -= NUM_SREGS;
4336
4337 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4338 }
4339
4340 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4341 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4342 NULL if an error occurs. BUF is freed. */
4343
4344 static gdb_byte *
4345 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4346 int old_len, int new_len)
4347 {
4348 gdb_byte *new_buf;
4349 int bytes_to_read = new_len - old_len;
4350
4351 new_buf = (gdb_byte *) xmalloc (new_len);
4352 memcpy (new_buf + bytes_to_read, buf, old_len);
4353 xfree (buf);
4354 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4355 {
4356 xfree (new_buf);
4357 return NULL;
4358 }
4359 return new_buf;
4360 }
4361
4362 /* An IT block is at most the 2-byte IT instruction followed by
4363 four 4-byte instructions. The furthest back we must search to
4364 find an IT block that affects the current instruction is thus
4365 2 + 3 * 4 == 14 bytes. */
4366 #define MAX_IT_BLOCK_PREFIX 14
4367
4368 /* Use a quick scan if there are more than this many bytes of
4369 code. */
4370 #define IT_SCAN_THRESHOLD 32
4371
4372 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4373 A breakpoint in an IT block may not be hit, depending on the
4374 condition flags. */
4375 static CORE_ADDR
4376 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4377 {
4378 gdb_byte *buf;
4379 char map_type;
4380 CORE_ADDR boundary, func_start;
4381 int buf_len;
4382 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4383 int i, any, last_it, last_it_count;
4384 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
4385
4386 /* If we are using BKPT breakpoints, none of this is necessary. */
4387 if (tdep->thumb2_breakpoint == NULL)
4388 return bpaddr;
4389
4390 /* ARM mode does not have this problem. */
4391 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4392 return bpaddr;
4393
4394 /* We are setting a breakpoint in Thumb code that could potentially
4395 contain an IT block. The first step is to find how much Thumb
4396 code there is; we do not need to read outside of known Thumb
4397 sequences. */
4398 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4399 if (map_type == 0)
4400 /* Thumb-2 code must have mapping symbols to have a chance. */
4401 return bpaddr;
4402
4403 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4404
4405 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4406 && func_start > boundary)
4407 boundary = func_start;
4408
4409 /* Search for a candidate IT instruction. We have to do some fancy
4410 footwork to distinguish a real IT instruction from the second
4411 half of a 32-bit instruction, but there is no need for that if
4412 there's no candidate. */
4413 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4414 if (buf_len == 0)
4415 /* No room for an IT instruction. */
4416 return bpaddr;
4417
4418 buf = (gdb_byte *) xmalloc (buf_len);
4419 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4420 return bpaddr;
4421 any = 0;
4422 for (i = 0; i < buf_len; i += 2)
4423 {
4424 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4425 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4426 {
4427 any = 1;
4428 break;
4429 }
4430 }
4431
4432 if (any == 0)
4433 {
4434 xfree (buf);
4435 return bpaddr;
4436 }
4437
4438 /* OK, the code bytes before this instruction contain at least one
4439 halfword which resembles an IT instruction. We know that it's
4440 Thumb code, but there are still two possibilities. Either the
4441 halfword really is an IT instruction, or it is the second half of
4442 a 32-bit Thumb instruction. The only way we can tell is to
4443 scan forwards from a known instruction boundary. */
4444 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4445 {
4446 int definite;
4447
4448 /* There's a lot of code before this instruction. Start with an
4449 optimistic search; it's easy to recognize halfwords that can
4450 not be the start of a 32-bit instruction, and use that to
4451 lock on to the instruction boundaries. */
4452 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4453 if (buf == NULL)
4454 return bpaddr;
4455 buf_len = IT_SCAN_THRESHOLD;
4456
4457 definite = 0;
4458 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4459 {
4460 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4461 if (thumb_insn_size (inst1) == 2)
4462 {
4463 definite = 1;
4464 break;
4465 }
4466 }
4467
4468 /* At this point, if DEFINITE, BUF[I] is the first place we
4469 are sure that we know the instruction boundaries, and it is far
4470 enough from BPADDR that we could not miss an IT instruction
4471 affecting BPADDR. If ! DEFINITE, give up - start from a
4472 known boundary. */
4473 if (! definite)
4474 {
4475 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4476 bpaddr - boundary);
4477 if (buf == NULL)
4478 return bpaddr;
4479 buf_len = bpaddr - boundary;
4480 i = 0;
4481 }
4482 }
4483 else
4484 {
4485 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4486 if (buf == NULL)
4487 return bpaddr;
4488 buf_len = bpaddr - boundary;
4489 i = 0;
4490 }
4491
4492 /* Scan forwards. Find the last IT instruction before BPADDR. */
4493 last_it = -1;
4494 last_it_count = 0;
4495 while (i < buf_len)
4496 {
4497 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4498 last_it_count--;
4499 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4500 {
4501 last_it = i;
4502 if (inst1 & 0x0001)
4503 last_it_count = 4;
4504 else if (inst1 & 0x0002)
4505 last_it_count = 3;
4506 else if (inst1 & 0x0004)
4507 last_it_count = 2;
4508 else
4509 last_it_count = 1;
4510 }
4511 i += thumb_insn_size (inst1);
4512 }
4513
4514 xfree (buf);
4515
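/* At this point LAST_IT_COUNT is the number of conditional slots of
   the most recent candidate IT block that extend to or beyond BPADDR;
   if it is still positive, BPADDR falls inside that block. */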
4516 if (last_it == -1)
4517 /* There wasn't really an IT instruction after all. */
4518 return bpaddr;
4519
4520 if (last_it_count < 1)
4521 /* It was too far away. */
4522 return bpaddr;
4523
4524 /* This really is a trouble spot. Move the breakpoint to the IT
4525 instruction. */
4526 return bpaddr - buf_len + last_it;
4527 }
4528
4529 /* ARM displaced stepping support.
4530
4531 Generally ARM displaced stepping works as follows:
4532
4533 1. When an instruction is to be single-stepped, it is first decoded by
4534 arm_process_displaced_insn. Depending on the type of instruction, it is
4535 then copied to a scratch location, possibly in a modified form. The
4536 copy_* set of functions performs such modification, as necessary. A
4537 breakpoint is placed after the modified instruction in the scratch space
4538 to return control to GDB. Note in particular that instructions which
4539 modify the PC will no longer do so after modification.
4540
4541 2. The instruction is single-stepped, by setting the PC to the scratch
4542 location address, and resuming. Control returns to GDB when the
4543 breakpoint is hit.
4544
4545 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4546 function used for the current instruction. This function's job is to
4547 put the CPU/memory state back to what it would have been if the
4548 instruction had been executed unmodified in its original location. */
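/* For example, a "bl" instruction is copied to the scratch area as a
   plain NOP; after the single step, the corresponding cleanup routine
   computes the return address and branch destination from the original
   location and writes them to LR and the PC (see install_b_bl_blx
   below). */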
4549
4550 /* NOP instruction (mov r0, r0). */
4551 #define ARM_NOP 0xe1a00000
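/* The equivalent Thumb encoding of "mov r0, r0". */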
4552 #define THUMB_NOP 0x4600
4553
4554 /* Helper for register reads for displaced stepping. In particular, this
4555 returns the PC as it would be seen by the instruction at its original
4556 location. */
4557
4558 ULONGEST
4559 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4560 int regno)
4561 {
4562 ULONGEST ret;
4563 CORE_ADDR from = dsc->insn_addr;
4564
4565 if (regno == ARM_PC_REGNUM)
4566 {
4567 /* Compute pipeline offset:
4568 - When executing an ARM instruction, PC reads as the address of the
4569 current instruction plus 8.
4570 - When executing a Thumb instruction, PC reads as the address of the
4571 current instruction plus 4. */
4572
4573 if (!dsc->is_thumb)
4574 from += 8;
4575 else
4576 from += 4;
4577
4578 displaced_debug_printf ("read pc value %.8lx",
4579 (unsigned long) from);
4580 return (ULONGEST) from;
4581 }
4582 else
4583 {
4584 regcache_cooked_read_unsigned (regs, regno, &ret);
4585
4586 displaced_debug_printf ("read r%d value %.8lx",
4587 regno, (unsigned long) ret);
4588
4589 return ret;
4590 }
4591 }
4592
4593 static int
4594 displaced_in_arm_mode (struct regcache *regs)
4595 {
4596 ULONGEST ps;
4597 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4598
4599 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4600
4601 return (ps & t_bit) == 0;
4602 }
4603
4604 /* Write to the PC as from a branch instruction. */
4605
4606 static void
4607 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4608 ULONGEST val)
4609 {
4610 if (!dsc->is_thumb)
4611 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4612 architecture versions < 6. */
4613 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4614 val & ~(ULONGEST) 0x3);
4615 else
4616 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4617 val & ~(ULONGEST) 0x1);
4618 }
4619
4620 /* Write to the PC as from a branch-exchange instruction. */
4621
4622 static void
4623 bx_write_pc (struct regcache *regs, ULONGEST val)
4624 {
4625 ULONGEST ps;
4626 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4627
4628 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4629
4630 if ((val & 1) == 1)
4631 {
4632 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4633 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4634 }
4635 else if ((val & 2) == 0)
4636 {
4637 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4638 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4639 }
4640 else
4641 {
4642 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4643 mode, align dest to 4 bytes). */
4644 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4645 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4646 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4647 }
4648 }
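/* For example, bx_write_pc (regs, 0x8001) selects Thumb state and sets
   the PC to 0x8000, while bx_write_pc (regs, 0x8000) selects ARM state
   and leaves the PC at 0x8000. */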
4649
4650 /* Write to the PC as if from a load instruction. */
4651
4652 static void
4653 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4654 ULONGEST val)
4655 {
4656 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4657 bx_write_pc (regs, val);
4658 else
4659 branch_write_pc (regs, dsc, val);
4660 }
4661
4662 /* Write to the PC as if from an ALU instruction. */
4663
4664 static void
4665 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4666 ULONGEST val)
4667 {
4668 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4669 bx_write_pc (regs, val);
4670 else
4671 branch_write_pc (regs, dsc, val);
4672 }
4673
4674 /* Helper for writing to registers for displaced stepping. Writing to the PC
4675 has varying effects depending on the instruction which does the write:
4676 this is controlled by the WRITE_PC argument. */
4677
4678 void
4679 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4680 int regno, ULONGEST val, enum pc_write_style write_pc)
4681 {
4682 if (regno == ARM_PC_REGNUM)
4683 {
4684 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
4685
4686 switch (write_pc)
4687 {
4688 case BRANCH_WRITE_PC:
4689 branch_write_pc (regs, dsc, val);
4690 break;
4691
4692 case BX_WRITE_PC:
4693 bx_write_pc (regs, val);
4694 break;
4695
4696 case LOAD_WRITE_PC:
4697 load_write_pc (regs, dsc, val);
4698 break;
4699
4700 case ALU_WRITE_PC:
4701 alu_write_pc (regs, dsc, val);
4702 break;
4703
4704 case CANNOT_WRITE_PC:
4705 warning (_("Instruction wrote to PC in an unexpected way when "
4706 "single-stepping"));
4707 break;
4708
4709 default:
4710 internal_error (__FILE__, __LINE__,
4711 _("Invalid argument to displaced_write_reg"));
4712 }
4713
4714 dsc->wrote_to_pc = 1;
4715 }
4716 else
4717 {
4718 displaced_debug_printf ("writing r%d value %.8lx",
4719 regno, (unsigned long) val);
4720 regcache_cooked_write_unsigned (regs, regno, val);
4721 }
4722 }
4723
4724 /* This function is used to concisely determine if an instruction INSN
4725 references PC. Register fields of interest in INSN should have the
4726 corresponding fields of BITMASK set to 0b1111. The function
4727 returns 1 if any of these fields in INSN references the PC
4728 (also 0b1111, r15), else it returns 0. */
4729
4730 static int
4731 insn_references_pc (uint32_t insn, uint32_t bitmask)
4732 {
4733 uint32_t lowbit = 1;
4734
4735 while (bitmask != 0)
4736 {
4737 uint32_t mask;
4738
4739 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4740 ;
4741
4742 if (!lowbit)
4743 break;
4744
4745 mask = lowbit * 0xf;
4746
4747 if ((insn & mask) == mask)
4748 return 1;
4749
4750 bitmask &= ~mask;
4751 }
4752
4753 return 0;
4754 }
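/* For example, insn_references_pc (insn, 0x000f0000ul) is non-zero
   exactly when the Rn field in bits 16-19 of INSN is 0xf, i.e. when
   the instruction names the PC there. */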
4755
4756 /* The simplest copy function. Many instructions have the same effect no
4757 matter what address they are executed at: in those cases, use this. */
4758
4759 static int
4760 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
4761 arm_displaced_step_copy_insn_closure *dsc)
4762 {
4763 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4764 (unsigned long) insn, iname);
4765
4766 dsc->modinsn[0] = insn;
4767
4768 return 0;
4769 }
4770
4771 static int
4772 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4773 uint16_t insn2, const char *iname,
4774 arm_displaced_step_copy_insn_closure *dsc)
4775 {
4776 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4777 "unmodified", insn1, insn2, iname);
4778
4779 dsc->modinsn[0] = insn1;
4780 dsc->modinsn[1] = insn2;
4781 dsc->numinsns = 2;
4782
4783 return 0;
4784 }
4785
4786 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4787 modification. */
4788 static int
4789 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4790 const char *iname,
4791 arm_displaced_step_copy_insn_closure *dsc)
4792 {
4793 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4794 insn, iname);
4795
4796 dsc->modinsn[0] = insn;
4797
4798 return 0;
4799 }
4800
4801 /* Preload instructions with immediate offset. */
4802
4803 static void
4804 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
4805 arm_displaced_step_copy_insn_closure *dsc)
4806 {
4807 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4808 if (!dsc->u.preload.immed)
4809 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4810 }
4811
4812 static void
4813 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4814 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
4815 {
4816 ULONGEST rn_val;
4817 /* Preload instructions:
4818
4819 {pli/pld} [rn, #+/-imm]
4820 ->
4821 {pli/pld} [r0, #+/-imm]. */
4822
4823 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4824 rn_val = displaced_read_reg (regs, dsc, rn);
4825 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4826 dsc->u.preload.immed = 1;
4827
4828 dsc->cleanup = &cleanup_preload;
4829 }
4830
4831 static int
4832 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4833 arm_displaced_step_copy_insn_closure *dsc)
4834 {
4835 unsigned int rn = bits (insn, 16, 19);
4836
4837 if (!insn_references_pc (insn, 0x000f0000ul))
4838 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4839
4840 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
4841
4842 dsc->modinsn[0] = insn & 0xfff0ffff;
4843
4844 install_preload (gdbarch, regs, dsc, rn);
4845
4846 return 0;
4847 }
4848
4849 static int
4850 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4851 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
4852 {
4853 unsigned int rn = bits (insn1, 0, 3);
4854 unsigned int u_bit = bit (insn1, 7);
4855 int imm12 = bits (insn2, 0, 11);
4856 ULONGEST pc_val;
4857
4858 if (rn != ARM_PC_REGNUM)
4859 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4860
4861 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding T3
4862 and PLD (literal) Encoding T1. */
4863 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4864 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4865 imm12);
4866
4867 if (!u_bit)
4868 imm12 = -1 * imm12;
4869
4870 /* Rewrite instruction {pli/pld} PC imm12 into:
4871 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4872
4873 {pli/pld} [r0, r1]
4874
4875 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4876
4877 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4878 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4879
4880 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4881
4882 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4883 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4884 dsc->u.preload.immed = 0;
4885
4886 /* {pli/pld} [r0, r1] */
4887 dsc->modinsn[0] = insn1 & 0xfff0;
4888 dsc->modinsn[1] = 0xf001;
4889 dsc->numinsns = 2;
4890
4891 dsc->cleanup = &cleanup_preload;
4892 return 0;
4893 }
4894
4895 /* Preload instructions with register offset. */
4896
4897 static void
4898 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4899 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
4900 unsigned int rm)
4901 {
4902 ULONGEST rn_val, rm_val;
4903
4904 /* Preload register-offset instructions:
4905
4906 {pli/pld} [rn, rm {, shift}]
4907 ->
4908 {pli/pld} [r0, r1 {, shift}]. */
4909
4910 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4911 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4912 rn_val = displaced_read_reg (regs, dsc, rn);
4913 rm_val = displaced_read_reg (regs, dsc, rm);
4914 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4915 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4916 dsc->u.preload.immed = 0;
4917
4918 dsc->cleanup = &cleanup_preload;
4919 }
4920
4921 static int
4922 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4923 struct regcache *regs,
4924 arm_displaced_step_copy_insn_closure *dsc)
4925 {
4926 unsigned int rn = bits (insn, 16, 19);
4927 unsigned int rm = bits (insn, 0, 3);
4928
4929
4930 if (!insn_references_pc (insn, 0x000f000ful))
4931 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4932
4933 displaced_debug_printf ("copying preload insn %.8lx",
4934 (unsigned long) insn);
4935
4936 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4937
4938 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4939 return 0;
4940 }
4941
4942 /* Copy/cleanup coprocessor load and store instructions. */
4943
4944 static void
4945 cleanup_copro_load_store (struct gdbarch *gdbarch,
4946 struct regcache *regs,
4947 arm_displaced_step_copy_insn_closure *dsc)
4948 {
4949 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4950
4951 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4952
4953 if (dsc->u.ldst.writeback)
4954 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4955 }
4956
4957 static void
4958 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4959 arm_displaced_step_copy_insn_closure *dsc,
4960 int writeback, unsigned int rn)
4961 {
4962 ULONGEST rn_val;
4963
4964 /* Coprocessor load/store instructions:
4965
4966 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4967 ->
4968 {stc/stc2} [r0, #+/-imm].
4969
4970 ldc/ldc2 are handled identically. */
4971
4972 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4973 rn_val = displaced_read_reg (regs, dsc, rn);
4974 /* PC should be 4-byte aligned. */
4975 rn_val = rn_val & 0xfffffffc;
4976 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4977
4978 dsc->u.ldst.writeback = writeback;
4979 dsc->u.ldst.rn = rn;
4980
4981 dsc->cleanup = &cleanup_copro_load_store;
4982 }
4983
4984 static int
4985 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4986 struct regcache *regs,
4987 arm_displaced_step_copy_insn_closure *dsc)
4988 {
4989 unsigned int rn = bits (insn, 16, 19);
4990
4991 if (!insn_references_pc (insn, 0x000f0000ul))
4992 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4993
4994 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4995 (unsigned long) insn);
4996
4997 dsc->modinsn[0] = insn & 0xfff0ffff;
4998
4999 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5000
5001 return 0;
5002 }
5003
5004 static int
5005 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5006 uint16_t insn2, struct regcache *regs,
5007 arm_displaced_step_copy_insn_closure *dsc)
5008 {
5009 unsigned int rn = bits (insn1, 0, 3);
5010
5011 if (rn != ARM_PC_REGNUM)
5012 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5013 "copro load/store", dsc);
5014
5015 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5016 insn1, insn2);
5017
5018 dsc->modinsn[0] = insn1 & 0xfff0;
5019 dsc->modinsn[1] = insn2;
5020 dsc->numinsns = 2;
5021
5022 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
5023 do not support writeback, so pass 0 as the writeback argument. */
5024 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5025
5026 return 0;
5027 }
5028
5029 /* Clean up branch instructions (actually perform the branch, by setting
5030 PC). */
5031
5032 static void
5033 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5034 arm_displaced_step_copy_insn_closure *dsc)
5035 {
5036 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5037 int branch_taken = condition_true (dsc->u.branch.cond, status);
5038 enum pc_write_style write_pc = dsc->u.branch.exchange
5039 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5040
5041 if (!branch_taken)
5042 return;
5043
5044 if (dsc->u.branch.link)
5045 {
5046 /* The value of LR should be the address of the insn following the current
5047 one. To avoid confusing logic that later handles a `bx lr' insn, if the
5048 current insn is Thumb, bit 0 of the LR value should be set to 1. */
5049 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5050
5051 if (dsc->is_thumb)
5052 next_insn_addr |= 0x1;
5053
5054 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5055 CANNOT_WRITE_PC);
5056 }
5057
5058 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5059 }
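/* Note that when the branch is not taken, neither LR nor the PC is
   written here; the PC is left for the normal displaced-step fixup to
   advance past the original instruction. */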
5060
5061 /* Copy B/BL/BLX instructions with immediate destinations. */
5062
5063 static void
5064 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5065 arm_displaced_step_copy_insn_closure *dsc,
5066 unsigned int cond, int exchange, int link, long offset)
5067 {
5068 /* Implement "BL<cond> <label>" as:
5069
5070 Preparation: cond <- instruction condition
5071 Insn: mov r0, r0 (nop)
5072 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5073
5074 B<cond> similar, but don't set r14 in cleanup. */
5075
5076 dsc->u.branch.cond = cond;
5077 dsc->u.branch.link = link;
5078 dsc->u.branch.exchange = exchange;
5079
5080 dsc->u.branch.dest = dsc->insn_addr;
5081 if (link && exchange)
5082 /* For BLX, offset is computed from the Align (PC, 4). */
5083 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5084
5085 if (dsc->is_thumb)
5086 dsc->u.branch.dest += 4 + offset;
5087 else
5088 dsc->u.branch.dest += 8 + offset;
5089
5090 dsc->cleanup = &cleanup_branch;
5091 }
5092 static int
5093 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5094 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5095 {
5096 unsigned int cond = bits (insn, 28, 31);
5097 int exchange = (cond == 0xf);
5098 int link = exchange || bit (insn, 24);
5099 long offset;
5100
5101 displaced_debug_printf ("copying %s immediate insn %.8lx",
5102 (exchange) ? "blx" : (link) ? "bl" : "b",
5103 (unsigned long) insn);
5104 if (exchange)
5105 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5106 then arrange the switch into Thumb mode. */
5107 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5108 else
5109 offset = bits (insn, 0, 23) << 2;
5110
5111 if (bit (offset, 25))
5112 offset = offset | ~0x3ffffff;
5113
5114 dsc->modinsn[0] = ARM_NOP;
5115
5116 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5117 return 0;
5118 }
5119
5120 static int
5121 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5122 uint16_t insn2, struct regcache *regs,
5123 arm_displaced_step_copy_insn_closure *dsc)
5124 {
5125 int link = bit (insn2, 14);
5126 int exchange = link && !bit (insn2, 12);
5127 int cond = INST_AL;
5128 long offset = 0;
5129 int j1 = bit (insn2, 13);
5130 int j2 = bit (insn2, 11);
5131 int s = sbits (insn1, 10, 10);
5132 int i1 = !(j1 ^ bit (insn1, 10));
5133 int i2 = !(j2 ^ bit (insn1, 10));
5134
5135 if (!link && !exchange) /* B */
5136 {
5137 offset = (bits (insn2, 0, 10) << 1);
5138 if (bit (insn2, 12)) /* Encoding T4 */
5139 {
5140 offset |= (bits (insn1, 0, 9) << 12)
5141 | (i2 << 22)
5142 | (i1 << 23)
5143 | (s << 24);
5144 cond = INST_AL;
5145 }
5146 else /* Encoding T3 */
5147 {
5148 offset |= (bits (insn1, 0, 5) << 12)
5149 | (j1 << 18)
5150 | (j2 << 19)
5151 | (s << 20);
5152 cond = bits (insn1, 6, 9);
5153 }
5154 }
5155 else
5156 {
5157 offset = (bits (insn1, 0, 9) << 12);
5158 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5159 offset |= exchange ?
5160 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5161 }
5162
5163 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5164 link ? (exchange) ? "blx" : "bl" : "b",
5165 insn1, insn2, offset);
5166
5167 dsc->modinsn[0] = THUMB_NOP;
5168
5169 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5170 return 0;
5171 }
5172
5173 /* Copy B Thumb instructions. */
5174 static int
5175 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5176 arm_displaced_step_copy_insn_closure *dsc)
5177 {
5178 unsigned int cond = 0;
5179 int offset = 0;
5180 unsigned short bit_12_15 = bits (insn, 12, 15);
5181 CORE_ADDR from = dsc->insn_addr;
5182
5183 if (bit_12_15 == 0xd)
5184 {
5185 /* offset = SignExtend (imm8:0, 32) */
5186 offset = sbits ((insn << 1), 0, 8);
5187 cond = bits (insn, 8, 11);
5188 }
5189 else if (bit_12_15 == 0xe) /* Encoding T2 */
5190 {
5191 offset = sbits ((insn << 1), 0, 11);
5192 cond = INST_AL;
5193 }
5194
5195 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5196 insn, offset);
5197
5198 dsc->u.branch.cond = cond;
5199 dsc->u.branch.link = 0;
5200 dsc->u.branch.exchange = 0;
5201 dsc->u.branch.dest = from + 4 + offset;
5202
5203 dsc->modinsn[0] = THUMB_NOP;
5204
5205 dsc->cleanup = &cleanup_branch;
5206
5207 return 0;
5208 }
5209
5210 /* Copy BX/BLX with register-specified destinations. */
5211
5212 static void
5213 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5214 arm_displaced_step_copy_insn_closure *dsc, int link,
5215 unsigned int cond, unsigned int rm)
5216 {
5217 /* Implement "{BX,BLX}<cond> <reg>" as:
5218
5219 Preparation: cond <- instruction condition
5220 Insn: mov r0, r0 (nop)
5221 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5222
5223 Don't set r14 in cleanup for BX. */
5224
5225 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5226
5227 dsc->u.branch.cond = cond;
5228 dsc->u.branch.link = link;
5229
5230 dsc->u.branch.exchange = 1;
5231
5232 dsc->cleanup = &cleanup_branch;
5233 }
5234
5235 static int
5236 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5237 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5238 {
5239 unsigned int cond = bits (insn, 28, 31);
5240 /* BX: x12xxx1x
5241 BLX: x12xxx3x. */
5242 int link = bit (insn, 5);
5243 unsigned int rm = bits (insn, 0, 3);
5244
5245 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5246
5247 dsc->modinsn[0] = ARM_NOP;
5248
5249 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5250 return 0;
5251 }
5252
5253 static int
5254 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5255 struct regcache *regs,
5256 arm_displaced_step_copy_insn_closure *dsc)
5257 {
5258 int link = bit (insn, 7);
5259 unsigned int rm = bits (insn, 3, 6);
5260
5261 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5262
5263 dsc->modinsn[0] = THUMB_NOP;
5264
5265 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5266
5267 return 0;
5268 }
5269
5270
5271 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5272
5273 static void
5274 cleanup_alu_imm (struct gdbarch *gdbarch,
5275 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5276 {
5277 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5278 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5279 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5280 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5281 }
5282
5283 static int
5284 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5285 arm_displaced_step_copy_insn_closure *dsc)
5286 {
5287 unsigned int rn = bits (insn, 16, 19);
5288 unsigned int rd = bits (insn, 12, 15);
5289 unsigned int op = bits (insn, 21, 24);
5290 int is_mov = (op == 0xd);
5291 ULONGEST rd_val, rn_val;
5292
5293 if (!insn_references_pc (insn, 0x000ff000ul))
5294 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5295
5296 displaced_debug_printf ("copying immediate %s insn %.8lx",
5297 is_mov ? "move" : "ALU",
5298 (unsigned long) insn);
5299
5300 /* Instruction is of form:
5301
5302 <op><cond> rd, [rn,] #imm
5303
5304 Rewrite as:
5305
5306 Preparation: tmp1, tmp2 <- r0, r1;
5307 r0, r1 <- rd, rn
5308 Insn: <op><cond> r0, r1, #imm
5309 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5310 */
5311
5312 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5313 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5314 rn_val = displaced_read_reg (regs, dsc, rn);
5315 rd_val = displaced_read_reg (regs, dsc, rd);
5316 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5317 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5318 dsc->rd = rd;
5319
5320 if (is_mov)
5321 dsc->modinsn[0] = insn & 0xfff00fff;
5322 else
5323 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5324
5325 dsc->cleanup = &cleanup_alu_imm;
5326
5327 return 0;
5328 }
5329
5330 static int
5331 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5332 uint16_t insn2, struct regcache *regs,
5333 arm_displaced_step_copy_insn_closure *dsc)
5334 {
5335 unsigned int op = bits (insn1, 5, 8);
5336 unsigned int rn, rm, rd;
5337 ULONGEST rd_val, rn_val;
5338
5339 rn = bits (insn1, 0, 3); /* Rn */
5340 rm = bits (insn2, 0, 3); /* Rm */
5341 rd = bits (insn2, 8, 11); /* Rd */
5342
5343 /* This routine is only called for instruction MOV. */
5344 gdb_assert (op == 0x2 && rn == 0xf);
5345
5346 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5347 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5348
5349 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5350
5351 /* Instruction is of form:
5352
5353 <op><cond> rd, [rn,] #imm
5354
5355 Rewrite as:
5356
5357 Preparation: tmp1, tmp2 <- r0, r1;
5358 r0, r1 <- rd, rn
5359 Insn: <op><cond> r0, r1, #imm
5360 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5361 */
5362
5363 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5364 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5365 rn_val = displaced_read_reg (regs, dsc, rn);
5366 rd_val = displaced_read_reg (regs, dsc, rd);
5367 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5368 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5369 dsc->rd = rd;
5370
5371 dsc->modinsn[0] = insn1;
5372 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5373 dsc->numinsns = 2;
5374
5375 dsc->cleanup = &cleanup_alu_imm;
5376
5377 return 0;
5378 }
5379
5380 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5381
5382 static void
5383 cleanup_alu_reg (struct gdbarch *gdbarch,
5384 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5385 {
5386 ULONGEST rd_val;
5387 int i;
5388
5389 rd_val = displaced_read_reg (regs, dsc, 0);
5390
5391 for (i = 0; i < 3; i++)
5392 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5393
5394 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5395 }
5396
5397 static void
5398 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5399 arm_displaced_step_copy_insn_closure *dsc,
5400 unsigned int rd, unsigned int rn, unsigned int rm)
5401 {
5402 ULONGEST rd_val, rn_val, rm_val;
5403
5404 /* Instruction is of form:
5405
5406 <op><cond> rd, [rn,] rm [, <shift>]
5407
5408 Rewrite as:
5409
5410 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5411 r0, r1, r2 <- rd, rn, rm
5412 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5413 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5414 */
5415
5416 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5417 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5418 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5419 rd_val = displaced_read_reg (regs, dsc, rd);
5420 rn_val = displaced_read_reg (regs, dsc, rn);
5421 rm_val = displaced_read_reg (regs, dsc, rm);
5422 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5423 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5424 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5425 dsc->rd = rd;
5426
5427 dsc->cleanup = &cleanup_alu_reg;
5428 }
5429
5430 static int
5431 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5432 arm_displaced_step_copy_insn_closure *dsc)
5433 {
5434 unsigned int op = bits (insn, 21, 24);
5435 int is_mov = (op == 0xd);
5436
5437 if (!insn_references_pc (insn, 0x000ff00ful))
5438 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5439
5440 displaced_debug_printf ("copying reg %s insn %.8lx",
5441 is_mov ? "move" : "ALU", (unsigned long) insn);
5442
5443 if (is_mov)
5444 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5445 else
5446 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5447
5448 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5449 bits (insn, 0, 3));
5450 return 0;
5451 }
5452
5453 static int
5454 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5455 struct regcache *regs,
5456 arm_displaced_step_copy_insn_closure *dsc)
5457 {
5458 unsigned rm, rd;
5459
5460 rm = bits (insn, 3, 6);
5461 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5462
5463 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5464 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5465
5466 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5467
5468 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5469
5470 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5471
5472 return 0;
5473 }
5474
5475 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5476
5477 static void
5478 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5479 struct regcache *regs,
5480 arm_displaced_step_copy_insn_closure *dsc)
5481 {
5482 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5483 int i;
5484
5485 for (i = 0; i < 4; i++)
5486 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5487
5488 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5489 }
5490
5491 static void
5492 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5493 arm_displaced_step_copy_insn_closure *dsc,
5494 unsigned int rd, unsigned int rn, unsigned int rm,
5495 unsigned rs)
5496 {
5497 int i;
5498 ULONGEST rd_val, rn_val, rm_val, rs_val;
5499
5500 /* Instruction is of form:
5501
5502 <op><cond> rd, [rn,] rm, <shift> rs
5503
5504 Rewrite as:
5505
5506 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5507 r0, r1, r2, r3 <- rd, rn, rm, rs
5508 Insn: <op><cond> r0, r1, r2, <shift> r3
5509 Cleanup: tmp5 <- r0
5510 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5511 rd <- tmp5
5512 */
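/* The mapping mirrors install_alu_reg, with the shift-amount register rs
   additionally copied into r3.  For example (hypothetical instruction),
   "orr r5, r6, pc, lsl r7" would run out of line as
   "orr r0, r1, r2, lsl r3".  */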
5513
5514 for (i = 0; i < 4; i++)
5515 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5516
5517 rd_val = displaced_read_reg (regs, dsc, rd);
5518 rn_val = displaced_read_reg (regs, dsc, rn);
5519 rm_val = displaced_read_reg (regs, dsc, rm);
5520 rs_val = displaced_read_reg (regs, dsc, rs);
5521 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5522 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5523 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5524 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5525 dsc->rd = rd;
5526 dsc->cleanup = &cleanup_alu_shifted_reg;
5527 }
5528
5529 static int
5530 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5531 struct regcache *regs,
5532 arm_displaced_step_copy_insn_closure *dsc)
5533 {
5534 unsigned int op = bits (insn, 21, 24);
5535 int is_mov = (op == 0xd);
5536 unsigned int rd, rn, rm, rs;
5537
5538 if (!insn_references_pc (insn, 0x000fff0ful))
5539 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5540
5541 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5542 is_mov ? "move" : "ALU",
5543 (unsigned long) insn);
5544
5545 rn = bits (insn, 16, 19);
5546 rm = bits (insn, 0, 3);
5547 rs = bits (insn, 8, 11);
5548 rd = bits (insn, 12, 15);
5549
5550 if (is_mov)
5551 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5552 else
5553 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5554
5555 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5556
5557 return 0;
5558 }
5559
5560 /* Clean up load instructions. */
5561
5562 static void
5563 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5564 arm_displaced_step_copy_insn_closure *dsc)
5565 {
5566 ULONGEST rt_val, rt_val2 = 0, rn_val;
5567
5568 rt_val = displaced_read_reg (regs, dsc, 0);
5569 if (dsc->u.ldst.xfersize == 8)
5570 rt_val2 = displaced_read_reg (regs, dsc, 1);
5571 rn_val = displaced_read_reg (regs, dsc, 2);
5572
5573 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5574 if (dsc->u.ldst.xfersize > 4)
5575 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5576 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5577 if (!dsc->u.ldst.immed)
5578 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5579
5580 /* Handle register writeback. */
5581 if (dsc->u.ldst.writeback)
5582 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5583 /* Put result in right place. */
5584 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5585 if (dsc->u.ldst.xfersize == 8)
5586 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5587 }
5588
5589 /* Clean up store instructions. */
5590
5591 static void
5592 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5593 arm_displaced_step_copy_insn_closure *dsc)
5594 {
5595 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5596
5597 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5598 if (dsc->u.ldst.xfersize > 4)
5599 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5600 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5601 if (!dsc->u.ldst.immed)
5602 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5603 if (!dsc->u.ldst.restore_r4)
5604 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5605
5606 /* Writeback. */
5607 if (dsc->u.ldst.writeback)
5608 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5609 }
5610
5611 /* Copy "extra" load/store instructions. These are halfword/doubleword
5612 transfers, which have a different encoding from byte/word transfers. */
5613
5614 static int
5615 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5616 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5617 {
5618 unsigned int op1 = bits (insn, 20, 24);
5619 unsigned int op2 = bits (insn, 5, 6);
5620 unsigned int rt = bits (insn, 12, 15);
5621 unsigned int rn = bits (insn, 16, 19);
5622 unsigned int rm = bits (insn, 0, 3);
5623 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5624 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5625 int immed = (op1 & 0x4) != 0;
5626 int opcode;
5627 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5628
5629 if (!insn_references_pc (insn, 0x000ff00ful))
5630 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5631
5632 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5633 unprivileged ? "unprivileged " : "",
5634 (unsigned long) insn);
5635
5636 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5637
5638 if (opcode < 0)
5639 internal_error (__FILE__, __LINE__,
5640 _("copy_extra_ld_st: instruction decode error"));
5641
5642 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5643 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5644 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5645 if (!immed)
5646 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5647
5648 rt_val = displaced_read_reg (regs, dsc, rt);
5649 if (bytesize[opcode] == 8)
5650 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5651 rn_val = displaced_read_reg (regs, dsc, rn);
5652 if (!immed)
5653 rm_val = displaced_read_reg (regs, dsc, rm);
5654
5655 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5656 if (bytesize[opcode] == 8)
5657 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5658 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5659 if (!immed)
5660 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5661
5662 dsc->rd = rt;
5663 dsc->u.ldst.xfersize = bytesize[opcode];
5664 dsc->u.ldst.rn = rn;
5665 dsc->u.ldst.immed = immed;
5666 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5667 dsc->u.ldst.restore_r4 = 0;
5668
5669 if (immed)
5670 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5671 ->
5672 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5673 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5674 else
5675 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5676 ->
5677 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5678 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5679
5680 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5681
5682 return 0;
5683 }
5684
5685 /* Copy byte/halfword/word loads and stores. */
5686
5687 static void
5688 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5689 arm_displaced_step_copy_insn_closure *dsc, int load,
5690 int immed, int writeback, int size, int usermode,
5691 int rt, int rm, int rn)
5692 {
5693 ULONGEST rt_val, rn_val, rm_val = 0;
5694
5695 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5696 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5697 if (!immed)
5698 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5699 if (!load)
5700 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5701
5702 rt_val = displaced_read_reg (regs, dsc, rt);
5703 rn_val = displaced_read_reg (regs, dsc, rn);
5704 if (!immed)
5705 rm_val = displaced_read_reg (regs, dsc, rm);
5706
5707 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5708 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5709 if (!immed)
5710 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5711 dsc->rd = rt;
5712 dsc->u.ldst.xfersize = size;
5713 dsc->u.ldst.rn = rn;
5714 dsc->u.ldst.immed = immed;
5715 dsc->u.ldst.writeback = writeback;
5716
5717 /* To write PC we can do:
5718
5719 Before this sequence of instructions:
5720 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5721 r2 is the Rn value got from displaced_read_reg.
5722
5723 Insn1: push {pc} Write address of STR instruction + offset on stack
5724 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5725 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5726 = addr(Insn1) + offset - addr(Insn3) - 8
5727 = offset - 16
5728 Insn4: add r4, r4, #8 r4 = offset - 8
5729 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5730 = from + offset
5731 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5732
5733 Otherwise we don't know what value to write for PC, since the offset is
5734 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5735 of this can be found in Section "Saving from r15" in
5736 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
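/* Working through the arithmetic with hypothetical addresses: if the scratch
   copy of Insn1 sits at 0x1000 and this core stores PC + 8, the push/pop
   pair leaves r4 = 0x1008.  Insn3 reads PC as 0x1010, so
   r4 = 0x1008 - 0x1010 = -8 (offset - 16); Insn4 brings that to 0, and
   Insn5 leaves r0 = from + 8 + 0, which is exactly what the original STR
   would have stored under the PC + 8 convention.  */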
5737
5738 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5739 }
5740
5741
5742 static int
5743 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5744 uint16_t insn2, struct regcache *regs,
5745 arm_displaced_step_copy_insn_closure *dsc, int size)
5746 {
5747 unsigned int u_bit = bit (insn1, 7);
5748 unsigned int rt = bits (insn2, 12, 15);
5749 int imm12 = bits (insn2, 0, 11);
5750 ULONGEST pc_val;
5751
5752 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5753 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5754 imm12);
5755
5756 if (!u_bit)
5757 imm12 = -1 * imm12;
5758
5759 /* Rewrite instruction LDR Rt, [PC, #+/-imm12] into:
5760
5761 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- Align(pc, 4), r3 <- +/-imm12
5762
5763 Insn: LDR R0, [R2, R3]
5764
5765 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5766
5767
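/* For instance (hypothetical): "ldr.w r5, [pc, #-24]" at 0x8000 loads from
   Align(0x8004, 4) - 24 = 0x7fec; below, r2 receives 0x8004 & ~3 and r3
   receives -24, so the out-of-line "ldr r0, [r2, r3]" computes the same
   address (the addition wraps modulo 2^32).  */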
5768 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5769 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5770 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5771
5772 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5773
5774 pc_val = pc_val & 0xfffffffc;
5775
5776 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5777 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5778
5779 dsc->rd = rt;
5780
5781 dsc->u.ldst.xfersize = size;
5782 dsc->u.ldst.immed = 0;
5783 dsc->u.ldst.writeback = 0;
5784 dsc->u.ldst.restore_r4 = 0;
5785
5786 /* LDR R0, [R2, R3] */
5787 dsc->modinsn[0] = 0xf852;
5788 dsc->modinsn[1] = 0x3;
5789 dsc->numinsns = 2;
5790
5791 dsc->cleanup = &cleanup_load;
5792
5793 return 0;
5794 }
5795
5796 static int
5797 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5798 uint16_t insn2, struct regcache *regs,
5799 arm_displaced_step_copy_insn_closure *dsc,
5800 int writeback, int immed)
5801 {
5802 unsigned int rt = bits (insn2, 12, 15);
5803 unsigned int rn = bits (insn1, 0, 3);
5804 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5805 /* In LDR (register), there is also a register Rm, which is not allowed to
5806 be PC, so we don't have to check it. */
5807
5808 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5809 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5810 dsc);
5811
5812 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5813 rt, rn, insn1, insn2);
5814
5815 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5816 0, rt, rm, rn);
5817
5818 dsc->u.ldst.restore_r4 = 0;
5819
5820 if (immed)
5821 /* ldr[b]<cond> rt, [rn, #imm], etc.
5822 ->
5823 ldr[b]<cond> r0, [r2, #imm]. */
5824 {
5825 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5826 dsc->modinsn[1] = insn2 & 0x0fff;
5827 }
5828 else
5829 /* ldr[b]<cond> rt, [rn, rm], etc.
5830 ->
5831 ldr[b]<cond> r0, [r2, r3]. */
5832 {
5833 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5834 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5835 }
5836
5837 dsc->numinsns = 2;
5838
5839 return 0;
5840 }
5841
5842
5843 static int
5844 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5845 struct regcache *regs,
5846 arm_displaced_step_copy_insn_closure *dsc,
5847 int load, int size, int usermode)
5848 {
5849 int immed = !bit (insn, 25);
5850 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5851 unsigned int rt = bits (insn, 12, 15);
5852 unsigned int rn = bits (insn, 16, 19);
5853 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5854
5855 if (!insn_references_pc (insn, 0x000ff00ful))
5856 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5857
5858 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5859 load ? (size == 1 ? "ldrb" : "ldr")
5860 : (size == 1 ? "strb" : "str"),
5861 usermode ? "t" : "",
5862 rt, rn,
5863 (unsigned long) insn);
5864
5865 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5866 usermode, rt, rm, rn);
5867
5868 if (load || rt != ARM_PC_REGNUM)
5869 {
5870 dsc->u.ldst.restore_r4 = 0;
5871
5872 if (immed)
5873 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5874 ->
5875 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5876 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5877 else
5878 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5879 ->
5880 {ldr,str}[b]<cond> r0, [r2, r3]. */
5881 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5882 }
5883 else
5884 {
5885 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5886 dsc->u.ldst.restore_r4 = 1;
5887 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5888 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5889 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5890 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5891 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5892
5893 /* As above. */
5894 if (immed)
5895 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5896 else
5897 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5898
5899 dsc->numinsns = 6;
5900 }
5901
5902 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5903
5904 return 0;
5905 }
5906
5907 /* Cleanup LDM instructions with fully-populated register list. This is an
5908 unfortunate corner case: it's impossible to implement correctly by modifying
5909 the instruction. The issue is as follows: we have an instruction,
5910
5911 ldm rN, {r0-r15}
5912
5913 which we must rewrite to avoid loading PC. A possible solution would be to
5914 do the load in two halves, something like (with suitable cleanup
5915 afterwards):
5916
5917 mov r8, rN
5918 ldm[id][ab] r8!, {r0-r7}
5919 str r7, <temp>
5920 ldm[id][ab] r8, {r7-r14}
5921 <bkpt>
5922
5923 but at present there's no suitable place for <temp>, since the scratch space
5924 is overwritten before the cleanup routine is called. For now, we simply
5925 emulate the instruction. */
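/* Concretely (hypothetical instruction): for "ldm r2, {r0-r15}" the copied
   insn below is just a NOP, and this cleanup re-reads the sixteen words
   starting at the address r2 held at the original insn (recorded as
   xfer_addr) and writes each into its register, the last one reaching the
   PC via LOAD_WRITE_PC.  */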
5926
5927 static void
5928 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5929 arm_displaced_step_copy_insn_closure *dsc)
5930 {
5931 int inc = dsc->u.block.increment;
5932 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5933 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5934 uint32_t regmask = dsc->u.block.regmask;
5935 int regno = inc ? 0 : 15;
5936 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5937 int exception_return = dsc->u.block.load && dsc->u.block.user
5938 && (regmask & 0x8000) != 0;
5939 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5940 int do_transfer = condition_true (dsc->u.block.cond, status);
5941 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5942
5943 if (!do_transfer)
5944 return;
5945
5946 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5947 sensible we can do here. Complain loudly. */
5948 if (exception_return)
5949 error (_("Cannot single-step exception return"));
5950
5951 /* We don't handle any stores here for now. */
5952 gdb_assert (dsc->u.block.load != 0);
5953
5954 displaced_debug_printf ("emulating block transfer: %s %s %s",
5955 dsc->u.block.load ? "ldm" : "stm",
5956 dsc->u.block.increment ? "inc" : "dec",
5957 dsc->u.block.before ? "before" : "after");
5958
5959 while (regmask)
5960 {
5961 uint32_t memword;
5962
5963 if (inc)
5964 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5965 regno++;
5966 else
5967 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5968 regno--;
5969
5970 xfer_addr += bump_before;
5971
5972 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5973 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5974
5975 xfer_addr += bump_after;
5976
5977 regmask &= ~(1 << regno);
5978 }
5979
5980 if (dsc->u.block.writeback)
5981 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5982 CANNOT_WRITE_PC);
5983 }
5984
5985 /* Clean up an STM which included the PC in the register list. */
5986
5987 static void
5988 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5989 arm_displaced_step_copy_insn_closure *dsc)
5990 {
5991 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5992 int store_executed = condition_true (dsc->u.block.cond, status);
5993 CORE_ADDR pc_stored_at, transferred_regs
5994 = count_one_bits (dsc->u.block.regmask);
5995 CORE_ADDR stm_insn_addr;
5996 uint32_t pc_val;
5997 long offset;
5998 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5999
6000 /* If condition code fails, there's nothing else to do. */
6001 if (!store_executed)
6002 return;
6003
6004 if (dsc->u.block.increment)
6005 {
6006 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6007
6008 if (dsc->u.block.before)
6009 pc_stored_at += 4;
6010 }
6011 else
6012 {
6013 pc_stored_at = dsc->u.block.xfer_addr;
6014
6015 if (dsc->u.block.before)
6016 pc_stored_at -= 4;
6017 }
6018
6019 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6020 stm_insn_addr = dsc->scratch_base;
6021 offset = pc_val - stm_insn_addr;
6022
6023 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6024 offset);
6025
6026 /* Rewrite the stored PC to the proper value for the non-displaced original
6027 instruction. */
6028 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6029 dsc->insn_addr + offset);
6030 }
6031
6032 /* Clean up an LDM which includes the PC in the register list. We clumped all
6033 the registers in the transferred list into a contiguous range r0...rX (to
6034 avoid loading PC directly and losing control of the debugged program), so we
6035 must undo that here. */
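/* Example (hypothetical): "ldm r6, {r1, r5, pc}" is copied as
   "ldm r6, {r0, r1, r2}".  This cleanup then works downwards from r15:
   r2 goes to the PC, r1 to r5 and r0 to r1; afterwards r0 and r2 are
   restored from tmp[], while r1 is not, since it now holds its proper
   loaded value.  */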
6036
6037 static void
6038 cleanup_block_load_pc (struct gdbarch *gdbarch,
6039 struct regcache *regs,
6040 arm_displaced_step_copy_insn_closure *dsc)
6041 {
6042 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6043 int load_executed = condition_true (dsc->u.block.cond, status);
6044 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6045 unsigned int regs_loaded = count_one_bits (mask);
6046 unsigned int num_to_shuffle = regs_loaded, clobbered;
6047
6048 /* The method employed here will fail if the register list is fully populated
6049 (we need to avoid loading PC directly). */
6050 gdb_assert (num_to_shuffle < 16);
6051
6052 if (!load_executed)
6053 return;
6054
6055 clobbered = (1 << num_to_shuffle) - 1;
6056
6057 while (num_to_shuffle > 0)
6058 {
6059 if ((mask & (1 << write_reg)) != 0)
6060 {
6061 unsigned int read_reg = num_to_shuffle - 1;
6062
6063 if (read_reg != write_reg)
6064 {
6065 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6066 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6067 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6068 read_reg, write_reg);
6069 }
6070 else
6071 displaced_debug_printf ("LDM: register r%d already in the right "
6072 "place", write_reg);
6073
6074 clobbered &= ~(1 << write_reg);
6075
6076 num_to_shuffle--;
6077 }
6078
6079 write_reg--;
6080 }
6081
6082 /* Restore any registers we scribbled over. */
6083 for (write_reg = 0; clobbered != 0; write_reg++)
6084 {
6085 if ((clobbered & (1 << write_reg)) != 0)
6086 {
6087 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6088 CANNOT_WRITE_PC);
6089 displaced_debug_printf ("LDM: restored clobbered register r%d",
6090 write_reg);
6091 clobbered &= ~(1 << write_reg);
6092 }
6093 }
6094
6095 /* Perform register writeback manually. */
6096 if (dsc->u.block.writeback)
6097 {
6098 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6099
6100 if (dsc->u.block.increment)
6101 new_rn_val += regs_loaded * 4;
6102 else
6103 new_rn_val -= regs_loaded * 4;
6104
6105 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6106 CANNOT_WRITE_PC);
6107 }
6108 }
6109
6110 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6111 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6112
6113 static int
6114 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6115 struct regcache *regs,
6116 arm_displaced_step_copy_insn_closure *dsc)
6117 {
6118 int load = bit (insn, 20);
6119 int user = bit (insn, 22);
6120 int increment = bit (insn, 23);
6121 int before = bit (insn, 24);
6122 int writeback = bit (insn, 21);
6123 int rn = bits (insn, 16, 19);
6124
6125 /* Block transfers which don't mention PC can be run directly
6126 out-of-line. */
6127 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6128 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6129
6130 if (rn == ARM_PC_REGNUM)
6131 {
6132 warning (_("displaced: Unpredictable LDM or STM with "
6133 "base register r15"));
6134 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6135 }
6136
6137 displaced_debug_printf ("copying block transfer insn %.8lx",
6138 (unsigned long) insn);
6139
6140 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6141 dsc->u.block.rn = rn;
6142
6143 dsc->u.block.load = load;
6144 dsc->u.block.user = user;
6145 dsc->u.block.increment = increment;
6146 dsc->u.block.before = before;
6147 dsc->u.block.writeback = writeback;
6148 dsc->u.block.cond = bits (insn, 28, 31);
6149
6150 dsc->u.block.regmask = insn & 0xffff;
6151
6152 if (load)
6153 {
6154 if ((insn & 0xffff) == 0xffff)
6155 {
6156 /* LDM with a fully-populated register list. This case is
6157 particularly tricky. Implement for now by fully emulating the
6158 instruction (which might not behave perfectly in all cases, but
6159 these instructions should be rare enough for that not to matter
6160 too much). */
6161 dsc->modinsn[0] = ARM_NOP;
6162
6163 dsc->cleanup = &cleanup_block_load_all;
6164 }
6165 else
6166 {
6167 /* LDM of a list of registers which includes PC. Implement by
6168 rewriting the list of registers to be transferred into a
6169 contiguous chunk r0...rX before doing the transfer, then shuffling
6170 registers into the correct places in the cleanup routine. */
6171 unsigned int regmask = insn & 0xffff;
6172 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6173 unsigned int i;
6174
6175 for (i = 0; i < num_in_list; i++)
6176 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6177
6178 /* Writeback makes things complicated. We need to avoid clobbering
6179 the base register with one of the registers in our modified
6180 register list, but just using a different register can't work in
6181 all cases, e.g.:
6182
6183 ldm r14!, {r0-r13,pc}
6184
6185 which would need to be rewritten as:
6186
6187 ldm rN!, {r0-r14}
6188
6189 but that can't work, because there's no free register for N.
6190
6191 Solve this by turning off the writeback bit, and emulating
6192 writeback manually in the cleanup routine. */
6193
6194 if (writeback)
6195 insn &= ~(1 << 21);
6196
6197 new_regmask = (1 << num_in_list) - 1;
6198
6199 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6200 "%.4x, modified list %.4x",
6201 rn, writeback ? "!" : "",
6202 (int) insn & 0xffff, new_regmask);
6203
6204 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6205
6206 dsc->cleanup = &cleanup_block_load_pc;
6207 }
6208 }
6209 else
6210 {
6211 /* STM of a list of registers which includes PC. Run the instruction
6212 as-is, but out of line: this will store the wrong value for the PC,
6213 so we must manually fix up the memory in the cleanup routine.
6214 Doing things this way has the advantage that we can auto-detect
6215 the offset of the PC write (which is architecture-dependent) in
6216 the cleanup routine. */
6217 dsc->modinsn[0] = insn;
6218
6219 dsc->cleanup = &cleanup_block_store_pc;
6220 }
6221
6222 return 0;
6223 }
6224
6225 static int
6226 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6227 struct regcache *regs,
6228 arm_displaced_step_copy_insn_closure *dsc)
6229 {
6230 int rn = bits (insn1, 0, 3);
6231 int load = bit (insn1, 4);
6232 int writeback = bit (insn1, 5);
6233
6234 /* Block transfers which don't mention PC can be run directly
6235 out-of-line. */
6236 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6237 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6238
6239 if (rn == ARM_PC_REGNUM)
6240 {
6241 warning (_("displaced: Unpredictable LDM or STM with "
6242 "base register r15"));
6243 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6244 "unpredictable ldm/stm", dsc);
6245 }
6246
6247 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6248 insn1, insn2);
6249
6250 /* Clear bit 13, since it should always be zero. */
6251 dsc->u.block.regmask = (insn2 & 0xdfff);
6252 dsc->u.block.rn = rn;
6253
6254 dsc->u.block.load = load;
6255 dsc->u.block.user = 0;
6256 dsc->u.block.increment = bit (insn1, 7);
6257 dsc->u.block.before = bit (insn1, 8);
6258 dsc->u.block.writeback = writeback;
6259 dsc->u.block.cond = INST_AL;
6260 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6261
6262 if (load)
6263 {
6264 if (dsc->u.block.regmask == 0xffff)
6265 {
6266 /* This case cannot happen: bit 13 of the register mask was cleared above, so it can never equal 0xffff. */
6267 gdb_assert (0);
6268 }
6269 else
6270 {
6271 unsigned int regmask = dsc->u.block.regmask;
6272 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6273 unsigned int i;
6274
6275 for (i = 0; i < num_in_list; i++)
6276 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6277
6278 if (writeback)
6279 insn1 &= ~(1 << 5);
6280
6281 new_regmask = (1 << num_in_list) - 1;
6282
6283 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6284 "%.4x, modified list %.4x",
6285 rn, writeback ? "!" : "",
6286 (int) dsc->u.block.regmask, new_regmask);
6287
6288 dsc->modinsn[0] = insn1;
6289 dsc->modinsn[1] = (new_regmask & 0xffff);
6290 dsc->numinsns = 2;
6291
6292 dsc->cleanup = &cleanup_block_load_pc;
6293 }
6294 }
6295 else
6296 {
6297 dsc->modinsn[0] = insn1;
6298 dsc->modinsn[1] = insn2;
6299 dsc->numinsns = 2;
6300 dsc->cleanup = &cleanup_block_store_pc;
6301 }
6302 return 0;
6303 }
6304
6305 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6306 This is used to avoid a dependency on BFD's bfd_endian enum. */
6307
6308 ULONGEST
6309 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6310 int byte_order)
6311 {
6312 return read_memory_unsigned_integer (memaddr, len,
6313 (enum bfd_endian) byte_order);
6314 }
6315
6316 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6317
6318 CORE_ADDR
6319 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6320 CORE_ADDR val)
6321 {
6322 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6323 }
6324
6325 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6326
6327 static CORE_ADDR
6328 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6329 {
6330 return 0;
6331 }
6332
6333 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6334
6335 int
6336 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6337 {
6338 return arm_is_thumb (self->regcache);
6339 }
6340
6341 /* arm_software_single_step is called just before we want to resume the
6342 inferior, if we want to single-step it but there is no hardware or kernel
6343 single-step support. We find the possible targets of the upcoming
6344 instruction and breakpoint them. */
6345
6346 std::vector<CORE_ADDR>
6347 arm_software_single_step (struct regcache *regcache)
6348 {
6349 struct gdbarch *gdbarch = regcache->arch ();
6350 struct arm_get_next_pcs next_pcs_ctx;
6351
6352 arm_get_next_pcs_ctor (&next_pcs_ctx,
6353 &arm_get_next_pcs_ops,
6354 gdbarch_byte_order (gdbarch),
6355 gdbarch_byte_order_for_code (gdbarch),
6356 0,
6357 regcache);
6358
6359 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6360
6361 for (CORE_ADDR &pc_ref : next_pcs)
6362 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6363
6364 return next_pcs;
6365 }
6366
6367 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6368 for Linux, where some SVC instructions must be treated specially. */
6369
6370 static void
6371 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6372 arm_displaced_step_copy_insn_closure *dsc)
6373 {
6374 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6375
6376 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6377 (unsigned long) resume_addr);
6378
6379 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6380 }
6381
6382
6383 /* Common copy routine for svc instruction. */
6384
6385 static int
6386 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6387 arm_displaced_step_copy_insn_closure *dsc)
6388 {
6389 /* Preparation: none.
6390 Insn: unmodified svc.
6391 Cleanup: pc <- insn_addr + insn_size. */
6392
6393 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6394 instruction. */
6395 dsc->wrote_to_pc = 1;
6396
6397 /* Allow OS-specific code to override SVC handling. */
6398 if (dsc->u.svc.copy_svc_os)
6399 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6400 else
6401 {
6402 dsc->cleanup = &cleanup_svc;
6403 return 0;
6404 }
6405 }
6406
6407 static int
6408 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6409 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6410 {
6411
6412 displaced_debug_printf ("copying svc insn %.8lx",
6413 (unsigned long) insn);
6414
6415 dsc->modinsn[0] = insn;
6416
6417 return install_svc (gdbarch, regs, dsc);
6418 }
6419
6420 static int
6421 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6422 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6423 {
6424
6425 displaced_debug_printf ("copying svc insn %.4x", insn);
6426
6427 dsc->modinsn[0] = insn;
6428
6429 return install_svc (gdbarch, regs, dsc);
6430 }
6431
6432 /* Copy undefined instructions. */
6433
6434 static int
6435 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6436 arm_displaced_step_copy_insn_closure *dsc)
6437 {
6438 displaced_debug_printf ("copying undefined insn %.8lx",
6439 (unsigned long) insn);
6440
6441 dsc->modinsn[0] = insn;
6442
6443 return 0;
6444 }
6445
6446 static int
6447 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6448 arm_displaced_step_copy_insn_closure *dsc)
6449 {
6450
6451 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6452 (unsigned short) insn1, (unsigned short) insn2);
6453
6454 dsc->modinsn[0] = insn1;
6455 dsc->modinsn[1] = insn2;
6456 dsc->numinsns = 2;
6457
6458 return 0;
6459 }
6460
6461 /* Copy unpredictable instructions. */
6462
6463 static int
6464 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6465 arm_displaced_step_copy_insn_closure *dsc)
6466 {
6467 displaced_debug_printf ("copying unpredictable insn %.8lx",
6468 (unsigned long) insn);
6469
6470 dsc->modinsn[0] = insn;
6471
6472 return 0;
6473 }
6474
6475 /* The decode_* functions are instruction decoding helpers. They mostly follow
6476 the presentation in the ARM ARM. */
6477
6478 static int
6479 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6480 struct regcache *regs,
6481 arm_displaced_step_copy_insn_closure *dsc)
6482 {
6483 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6484 unsigned int rn = bits (insn, 16, 19);
6485
6486 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6487 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6488 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6489 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6490 else if ((op1 & 0x60) == 0x20)
6491 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6492 else if ((op1 & 0x71) == 0x40)
6493 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6494 dsc);
6495 else if ((op1 & 0x77) == 0x41)
6496 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6497 else if ((op1 & 0x77) == 0x45)
6498 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6499 else if ((op1 & 0x77) == 0x51)
6500 {
6501 if (rn != 0xf)
6502 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6503 else
6504 return arm_copy_unpred (gdbarch, insn, dsc);
6505 }
6506 else if ((op1 & 0x77) == 0x55)
6507 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6508 else if (op1 == 0x57)
6509 switch (op2)
6510 {
6511 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6512 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6513 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6514 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6515 default: return arm_copy_unpred (gdbarch, insn, dsc);
6516 }
6517 else if ((op1 & 0x63) == 0x43)
6518 return arm_copy_unpred (gdbarch, insn, dsc);
6519 else if ((op2 & 0x1) == 0x0)
6520 switch (op1 & ~0x80)
6521 {
6522 case 0x61:
6523 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6524 case 0x65:
6525 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6526 case 0x71: case 0x75:
6527 /* pld/pldw reg. */
6528 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6529 case 0x63: case 0x67: case 0x73: case 0x77:
6530 return arm_copy_unpred (gdbarch, insn, dsc);
6531 default:
6532 return arm_copy_undef (gdbarch, insn, dsc);
6533 }
6534 else
6535 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6536 }
6537
6538 static int
6539 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6540 struct regcache *regs,
6541 arm_displaced_step_copy_insn_closure *dsc)
6542 {
6543 if (bit (insn, 27) == 0)
6544 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6545 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6546 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6547 {
6548 case 0x0: case 0x2:
6549 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6550
6551 case 0x1: case 0x3:
6552 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6553
6554 case 0x4: case 0x5: case 0x6: case 0x7:
6555 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6556
6557 case 0x8:
6558 switch ((insn & 0xe00000) >> 21)
6559 {
6560 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6561 /* stc/stc2. */
6562 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6563
6564 case 0x2:
6565 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6566
6567 default:
6568 return arm_copy_undef (gdbarch, insn, dsc);
6569 }
6570
6571 case 0x9:
6572 {
6573 int rn_f = (bits (insn, 16, 19) == 0xf);
6574 switch ((insn & 0xe00000) >> 21)
6575 {
6576 case 0x1: case 0x3:
6577 /* ldc/ldc2 imm (undefined for rn == pc). */
6578 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6579 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6580
6581 case 0x2:
6582 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6583
6584 case 0x4: case 0x5: case 0x6: case 0x7:
6585 /* ldc/ldc2 lit (undefined for rn != pc). */
6586 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6587 : arm_copy_undef (gdbarch, insn, dsc);
6588
6589 default:
6590 return arm_copy_undef (gdbarch, insn, dsc);
6591 }
6592 }
6593
6594 case 0xa:
6595 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6596
6597 case 0xb:
6598 if (bits (insn, 16, 19) == 0xf)
6599 /* ldc/ldc2 lit. */
6600 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6601 else
6602 return arm_copy_undef (gdbarch, insn, dsc);
6603
6604 case 0xc:
6605 if (bit (insn, 4))
6606 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6607 else
6608 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6609
6610 case 0xd:
6611 if (bit (insn, 4))
6612 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6613 else
6614 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6615
6616 default:
6617 return arm_copy_undef (gdbarch, insn, dsc);
6618 }
6619 }
6620
6621 /* Decode miscellaneous instructions in dp/misc encoding space. */
6622
6623 static int
6624 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6625 struct regcache *regs,
6626 arm_displaced_step_copy_insn_closure *dsc)
6627 {
6628 unsigned int op2 = bits (insn, 4, 6);
6629 unsigned int op = bits (insn, 21, 22);
6630
6631 switch (op2)
6632 {
6633 case 0x0:
6634 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6635
6636 case 0x1:
6637 if (op == 0x1) /* bx. */
6638 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6639 else if (op == 0x3)
6640 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6641 else
6642 return arm_copy_undef (gdbarch, insn, dsc);
6643
6644 case 0x2:
6645 if (op == 0x1)
6646 /* Not really supported. */
6647 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6648 else
6649 return arm_copy_undef (gdbarch, insn, dsc);
6650
6651 case 0x3:
6652 if (op == 0x1)
6653 return arm_copy_bx_blx_reg (gdbarch, insn,
6654 regs, dsc); /* blx register. */
6655 else
6656 return arm_copy_undef (gdbarch, insn, dsc);
6657
6658 case 0x5:
6659 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6660
6661 case 0x7:
6662 if (op == 0x1)
6663 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6664 else if (op == 0x3)
6665 /* Not really supported. */
6666 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6667 /* Fall through. */
6668
6669 default:
6670 return arm_copy_undef (gdbarch, insn, dsc);
6671 }
6672 }
6673
6674 static int
6675 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6676 struct regcache *regs,
6677 arm_displaced_step_copy_insn_closure *dsc)
6678 {
6679 if (bit (insn, 25))
6680 switch (bits (insn, 20, 24))
6681 {
6682 case 0x10:
6683 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6684
6685 case 0x14:
6686 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6687
6688 case 0x12: case 0x16:
6689 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6690
6691 default:
6692 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6693 }
6694 else
6695 {
6696 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6697
6698 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6699 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6700 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6701 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6702 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6703 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6704 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6705 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6706 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6707 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6708 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6709 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6710 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6711 /* 2nd arg means "unprivileged". */
6712 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6713 dsc);
6714 }
6715
6716 /* Should be unreachable. */
6717 return 1;
6718 }
6719
6720 static int
6721 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6722 struct regcache *regs,
6723 arm_displaced_step_copy_insn_closure *dsc)
6724 {
6725 int a = bit (insn, 25), b = bit (insn, 4);
6726 uint32_t op1 = bits (insn, 20, 24);
6727
6728 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6729 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6730 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6731 else if ((!a && (op1 & 0x17) == 0x02)
6732 || (a && (op1 & 0x17) == 0x02 && !b))
6733 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6734 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6735 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6736 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6737 else if ((!a && (op1 & 0x17) == 0x03)
6738 || (a && (op1 & 0x17) == 0x03 && !b))
6739 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6740 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6741 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6742 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6743 else if ((!a && (op1 & 0x17) == 0x06)
6744 || (a && (op1 & 0x17) == 0x06 && !b))
6745 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6746 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6747 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6748 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6749 else if ((!a && (op1 & 0x17) == 0x07)
6750 || (a && (op1 & 0x17) == 0x07 && !b))
6751 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6752
6753 /* Should be unreachable. */
6754 return 1;
6755 }
6756
6757 static int
6758 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6759 arm_displaced_step_copy_insn_closure *dsc)
6760 {
6761 switch (bits (insn, 20, 24))
6762 {
6763 case 0x00: case 0x01: case 0x02: case 0x03:
6764 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6765
6766 case 0x04: case 0x05: case 0x06: case 0x07:
6767 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6768
6769 case 0x08: case 0x09: case 0x0a: case 0x0b:
6770 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6771 return arm_copy_unmodified (gdbarch, insn,
6772 "decode/pack/unpack/saturate/reverse", dsc);
6773
6774 case 0x18:
6775 if (bits (insn, 5, 7) == 0) /* op2. */
6776 {
6777 if (bits (insn, 12, 15) == 0xf)
6778 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6779 else
6780 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6781 }
6782 else
6783 return arm_copy_undef (gdbarch, insn, dsc);
6784
6785 case 0x1a: case 0x1b:
6786 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6787 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6788 else
6789 return arm_copy_undef (gdbarch, insn, dsc);
6790
6791 case 0x1c: case 0x1d:
6792 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6793 {
6794 if (bits (insn, 0, 3) == 0xf)
6795 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6796 else
6797 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6798 }
6799 else
6800 return arm_copy_undef (gdbarch, insn, dsc);
6801
6802 case 0x1e: case 0x1f:
6803 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6804 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6805 else
6806 return arm_copy_undef (gdbarch, insn, dsc);
6807 }
6808
6809 /* Should be unreachable. */
6810 return 1;
6811 }
6812
6813 static int
6814 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6815 struct regcache *regs,
6816 arm_displaced_step_copy_insn_closure *dsc)
6817 {
6818 if (bit (insn, 25))
6819 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6820 else
6821 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6822 }
6823
6824 static int
6825 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6826 struct regcache *regs,
6827 arm_displaced_step_copy_insn_closure *dsc)
6828 {
6829 unsigned int opcode = bits (insn, 20, 24);
6830
6831 switch (opcode)
6832 {
6833 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6834 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6835
6836 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6837 case 0x12: case 0x16:
6838 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6839
6840 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6841 case 0x13: case 0x17:
6842 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6843
6844 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6845 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6846 /* Note: no writeback for these instructions. Bit 25 will always be
6847 zero though (via caller), so the following works OK. */
6848 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6849 }
6850
6851 /* Should be unreachable. */
6852 return 1;
6853 }
6854
6855 /* Decode shifted register instructions. */
6856
6857 static int
6858 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6859 uint16_t insn2, struct regcache *regs,
6860 arm_displaced_step_copy_insn_closure *dsc)
6861 {
6862 /* PC is only allowed to be used in the MOV instruction. */
6863
6864 unsigned int op = bits (insn1, 5, 8);
6865 unsigned int rn = bits (insn1, 0, 3);
6866
6867 if (op == 0x2 && rn == 0xf) /* MOV */
6868 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6869 else
6870 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6871 "dp (shift reg)", dsc);
6872 }
6873
6874
6875 /* Decode extension register load/store. Exactly the same as
6876 arm_decode_ext_reg_ld_st. */
6877
6878 static int
6879 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6880 uint16_t insn2, struct regcache *regs,
6881 arm_displaced_step_copy_insn_closure *dsc)
6882 {
6883 unsigned int opcode = bits (insn1, 4, 8);
6884
6885 switch (opcode)
6886 {
6887 case 0x04: case 0x05:
6888 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6889 "vfp/neon vmov", dsc);
6890
6891 case 0x08: case 0x0c: /* 01x00 */
6892 case 0x0a: case 0x0e: /* 01x10 */
6893 case 0x12: case 0x16: /* 10x10 */
6894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6895 "vfp/neon vstm/vpush", dsc);
6896
6897 case 0x09: case 0x0d: /* 01x01 */
6898 case 0x0b: case 0x0f: /* 01x11 */
6899 case 0x13: case 0x17: /* 10x11 */
6900 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6901 "vfp/neon vldm/vpop", dsc);
6902
6903 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6905 "vstr", dsc);
6906 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6907 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6908 }
6909
6910 /* Should be unreachable. */
6911 return 1;
6912 }
6913
6914 static int
6915 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6916 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6917 {
6918 unsigned int op1 = bits (insn, 20, 25);
6919 int op = bit (insn, 4);
6920 unsigned int coproc = bits (insn, 8, 11);
6921
6922 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6923 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6924 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6925 && (coproc & 0xe) != 0xa)
6926 /* stc/stc2. */
6927 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6928 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6929 && (coproc & 0xe) != 0xa)
6930 /* ldc/ldc2 imm/lit. */
6931 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6932 else if ((op1 & 0x3e) == 0x00)
6933 return arm_copy_undef (gdbarch, insn, dsc);
6934 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6935 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6936 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6937 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6938 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6939 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6940 else if ((op1 & 0x30) == 0x20 && !op)
6941 {
6942 if ((coproc & 0xe) == 0xa)
6943 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6944 else
6945 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6946 }
6947 else if ((op1 & 0x30) == 0x20 && op)
6948 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6949 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6950 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6951 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6952 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6953 else if ((op1 & 0x30) == 0x30)
6954 return arm_copy_svc (gdbarch, insn, regs, dsc);
6955 else
6956 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6957 }
6958
6959 static int
6960 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6961 uint16_t insn2, struct regcache *regs,
6962 arm_displaced_step_copy_insn_closure *dsc)
6963 {
6964 unsigned int coproc = bits (insn2, 8, 11);
6965 unsigned int bit_5_8 = bits (insn1, 5, 8);
6966 unsigned int bit_9 = bit (insn1, 9);
6967 unsigned int bit_4 = bit (insn1, 4);
6968
6969 if (bit_9 == 0)
6970 {
6971 if (bit_5_8 == 2)
6972 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6973 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6974 dsc);
6975 else if (bit_5_8 == 0) /* UNDEFINED. */
6976 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6977 else
6978 {
6979 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
6980 if ((coproc & 0xe) == 0xa)
6981 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6982 dsc);
6983 else /* coproc is not 101x. */
6984 {
6985 if (bit_4 == 0) /* STC/STC2. */
6986 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6987 "stc/stc2", dsc);
6988 else /* LDC/LDC2 {literal, immediate}. */
6989 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6990 regs, dsc);
6991 }
6992 }
6993 }
6994 else
6995 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6996
6997 return 0;
6998 }
6999
7000 static void
7001 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7002 arm_displaced_step_copy_insn_closure *dsc, int rd)
7003 {
7004 /* ADR Rd, #imm
7005
7006 Rewrite as:
7007
7008 Preparation: Rd <- PC
7009 Insn: ADD Rd, #imm
7010 Cleanup: Null.
7011 */
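/* A sketch of why this works (not tied to a particular encoding): rd is
   pre-loaded with the PC value read at the original location, so the ADD
   executed in the scratch area produces a result relative to the original
   PC rather than to the scratch-pad address.  */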
7012
7013 /* Rd <- PC */
7014 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7015 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7016 }
7017
7018 static int
7019 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7020 arm_displaced_step_copy_insn_closure *dsc,
7021 int rd, unsigned int imm)
7022 {
7023
7024 /* Encoding T2: ADDS Rd, #imm */
7025 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7026
7027 install_pc_relative (gdbarch, regs, dsc, rd);
7028
7029 return 0;
7030 }
7031
7032 static int
7033 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7034 struct regcache *regs,
7035 arm_displaced_step_copy_insn_closure *dsc)
7036 {
7037 unsigned int rd = bits (insn, 8, 10);
7038 unsigned int imm8 = bits (insn, 0, 7);
7039
7040 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7041 rd, imm8, insn);
7042
7043 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7044 }
7045
7046 static int
7047 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7048 uint16_t insn2, struct regcache *regs,
7049 arm_displaced_step_copy_insn_closure *dsc)
7050 {
7051 unsigned int rd = bits (insn2, 8, 11);
7052 /* The immediate occupies the same bit positions in ADR, ADD and SUB, so
7053 simply extract the raw immediate fields rather than decoding the value.
7054 When generating the ADD or SUB instruction, the immediate can then be
7055 ORed directly into the encoding. */
7056 unsigned int imm_3_8 = insn2 & 0x70ff;
7057 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7058
7059 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7060 rd, imm_i, imm_3_8, insn1, insn2);
7061
7062 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
7063 {
7064 /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm. */
7065 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7066 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7067 }
7068 else /* ADR encoding T3 (add form). */
7069 {
7070 /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm. */
7071 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7072 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7073 }
7074 dsc->numinsns = 2;
7075
7076 install_pc_relative (gdbarch, regs, dsc, rd);
7077
7078 return 0;
7079 }
7080
7081 static int
7082 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7083 struct regcache *regs,
7084 arm_displaced_step_copy_insn_closure *dsc)
7085 {
7086 unsigned int rt = bits (insn1, 8, 10);
7087 unsigned int pc;
7088 int imm8 = (bits (insn1, 0, 7) << 2);
7089
7090 /* LDR Rd, [PC, #imm8]
7091
7092 Rewrite as:
7093
7094 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- Align(PC, 4), R3 <- #imm8;
7095
7096 Insn: LDR R0, [R2, R3];
7097 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0. */
7098
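/* E.g. (hypothetical): "ldr r3, [pc, #20]" at 0x8000 ends up with
   r2 = 0x8004 and r3 = 20 below, so the out-of-line "ldr r0, [r2, r3]"
   reads from 0x8018, the same address the original insn would have used.  */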
7099 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7100
7101 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7102 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7103 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7104 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7105 /* The assembler calculates the required value of the offset from the
7106 Align(PC,4) value of this instruction to the label. */
7107 pc = pc & 0xfffffffc;
7108
7109 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7110 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7111
7112 dsc->rd = rt;
7113 dsc->u.ldst.xfersize = 4;
7114 dsc->u.ldst.rn = 0;
7115 dsc->u.ldst.immed = 0;
7116 dsc->u.ldst.writeback = 0;
7117 dsc->u.ldst.restore_r4 = 0;
7118
7119 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7120
7121 dsc->cleanup = &cleanup_load;
7122
7123 return 0;
7124 }
7125
7126 /* Copy Thumb cbnz/cbz instruction. */
7127
7128 static int
7129 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7130 struct regcache *regs,
7131 arm_displaced_step_copy_insn_closure *dsc)
7132 {
7133 int non_zero = bit (insn1, 11);
7134 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7135 CORE_ADDR from = dsc->insn_addr;
7136 int rn = bits (insn1, 0, 2);
7137 int rn_val = displaced_read_reg (regs, dsc, rn);
7138
7139 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7140 /* CBNZ and CBZ do not affect the condition flags. If the branch is taken,
7141 set the condition to INST_AL so that cleanup_branch knows to redirect the
7142 PC; otherwise leave it false and cleanup_branch will do nothing. */
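/* Sketch (hypothetical): for "cbnz r2, label" at 0x8000 with r2 nonzero,
   the copied insn is just a Thumb NOP, and cleanup_branch later sets the
   PC to 0x8004 + imm5, the target the original insn would have reached.  */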
7143 if (dsc->u.branch.cond)
7144 {
7145 dsc->u.branch.cond = INST_AL;
7146 dsc->u.branch.dest = from + 4 + imm5;
7147 }
7148 else
7149 dsc->u.branch.dest = from + 2;
7150
7151 dsc->u.branch.link = 0;
7152 dsc->u.branch.exchange = 0;
7153
7154 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7155 non_zero ? "cbnz" : "cbz",
7156 rn, rn_val, insn1, dsc->u.branch.dest);
7157
7158 dsc->modinsn[0] = THUMB_NOP;
7159
7160 dsc->cleanup = &cleanup_branch;
7161 return 0;
7162 }
7163
7164 /* Copy Table Branch Byte/Halfword (TBB/TBH) instructions. */
7165 static int
7166 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7167 uint16_t insn2, struct regcache *regs,
7168 arm_displaced_step_copy_insn_closure *dsc)
7169 {
7170 ULONGEST rn_val, rm_val;
7171 int is_tbh = bit (insn2, 4);
7172 CORE_ADDR halfwords = 0;
7173 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7174
7175 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7176 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7177
7178 if (is_tbh)
7179 {
7180 gdb_byte buf[2];
7181
7182 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7183 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7184 }
7185 else
7186 {
7187 gdb_byte buf[1];
7188
7189 target_read_memory (rn_val + rm_val, buf, 1);
7190 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7191 }
7192
7193 displaced_debug_printf ("%s base 0x%x offset 0x%x element 0x%x",
7194 is_tbh ? "tbh" : "tbb",
7195 (unsigned int) rn_val, (unsigned int) rm_val,
7196 (unsigned int) halfwords);
7197
7198 dsc->u.branch.cond = INST_AL;
7199 dsc->u.branch.link = 0;
7200 dsc->u.branch.exchange = 0;
7201 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7202
7203 dsc->cleanup = &cleanup_branch;
7204
7205 return 0;
7206 }
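/* Illustration only, not used by GDB: the destination computed above,
   written out as a stand-alone formula.  TBB reads a byte table entry at
   Rn + Rm, TBH reads a halfword entry at Rn + 2*Rm, and either way the
   branch target is PC + 2*entry, where PC reads as the instruction
   address plus 4.  The helper name is invented for this example.  */
#if 0
#include <stdint.h>

static uint32_t
example_table_branch_dest (uint32_t insn_addr, uint32_t table_entry)
{
  /* Matches dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords.  */
  return insn_addr + 4 + 2 * table_entry;
}
#endif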
7207
7208 static void
7209 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7210 arm_displaced_step_copy_insn_closure *dsc)
7211 {
7212 /* PC <- r7 */
7213 int val = displaced_read_reg (regs, dsc, 7);
7214 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7215
7216 /* r7 <- r8 */
7217 val = displaced_read_reg (regs, dsc, 8);
7218 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7219
7220 /* r8 <- tmp[0] */
7221 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7222
7223 }
7224
7225 static int
7226 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7227 struct regcache *regs,
7228 arm_displaced_step_copy_insn_closure *dsc)
7229 {
7230 dsc->u.block.regmask = insn1 & 0x00ff;
7231
7232 /* Rewrite the instruction: POP {rX, rY, ..., rZ, PC}
7233 as follows:
7234
7235 (1) register list is full, that is, r0-r7 are used.
7236 Prepare: tmp[0] <- r8
7237
7238 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7239 MOV r8, r7; Move value of r7 to r8;
7240 POP {r7}; Store PC value into r7.
7241
7242 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7243
7244 (2) register list is not full, supposing there are N registers in
7245 register list (except PC, 0 <= N <= 7).
7246 Prepare: for each i, 0 - N, tmp[i] <- ri.
7247
7248 POP {r0, r1, ...., rN};
7249
7250 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7251 from tmp[] properly.
7252 */
7253 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7254 dsc->u.block.regmask, insn1);
7255
7256 if (dsc->u.block.regmask == 0xff)
7257 {
7258 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7259
7260 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7261 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7262 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7263
7264 dsc->numinsns = 3;
7265 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7266 }
7267 else
7268 {
7269 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7270 unsigned int i;
7271 unsigned int new_regmask;
7272
7273 for (i = 0; i < num_in_list + 1; i++)
7274 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7275
7276 new_regmask = (1 << (num_in_list + 1)) - 1;
7277
7278 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7279 "modified list %.4x",
7280 (int) dsc->u.block.regmask, new_regmask);
7281
7282 dsc->u.block.regmask |= 0x8000;
7283 dsc->u.block.writeback = 0;
7284 dsc->u.block.cond = INST_AL;
7285
7286 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7287
7288 dsc->cleanup = &cleanup_block_load_pc;
7289 }
7290
7291 return 0;
7292 }
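/* Illustration only, not used by GDB: a worked example of the reglist
   rewrite above for the "register list is not full" case.  The helper
   name and the sample encoding 0xbd12 (pop {r1, r4, pc}) are invented
   for this example.  */
#if 0
#include <stdint.h>
#include <stdio.h>

static void
example_rewrite_pop_pc (uint16_t insn1)
{
  unsigned int regmask = insn1 & 0xff;         /* 0x12, i.e. {r1, r4}.  */
  unsigned int num_in_list = 0;
  unsigned int m, new_regmask;
  uint16_t modinsn;

  for (m = regmask; m != 0; m &= m - 1)        /* count_one_bits.  */
    num_in_list++;                             /* 2 registers.  */

  new_regmask = (1u << (num_in_list + 1)) - 1; /* 0x07, i.e. {r0-r2}.  */
  modinsn = (insn1 & ~0x1ffu) | (new_regmask & 0xff);

  /* For insn1 == 0xbd12 this prints "modinsn=0xbc07" (pop {r0, r1, r2});
     the cleanup then scatters r0/r1/r2 back to r1, r4 and the PC.  */
  printf ("modinsn=0x%x\n", modinsn);
}
#endif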
7293
7294 static void
7295 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7296 struct regcache *regs,
7297 arm_displaced_step_copy_insn_closure *dsc)
7298 {
7299 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7300 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7301 int err = 0;
7302
7303 /* 16-bit thumb instructions. */
7304 switch (op_bit_12_15)
7305 {
7306 /* Shift (immediate), add, subtract, move and compare. */
7307 case 0: case 1: case 2: case 3:
7308 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7309 "shift/add/sub/mov/cmp",
7310 dsc);
7311 break;
7312 case 4:
7313 switch (op_bit_10_11)
7314 {
7315 case 0: /* Data-processing */
7316 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7317 "data-processing",
7318 dsc);
7319 break;
7320 case 1: /* Special data instructions and branch and exchange. */
7321 {
7322 unsigned short op = bits (insn1, 7, 9);
7323 if (op == 6 || op == 7) /* BX or BLX */
7324 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7325 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7326 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7327 else
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7329 dsc);
7330 }
7331 break;
7332 default: /* LDR (literal) */
7333 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7334 }
7335 break;
7336 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7337 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7338 break;
7339 case 10:
7340 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7341 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7342 else /* Generate SP-relative address */
7343 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7344 break;
7345 case 11: /* Misc 16-bit instructions */
7346 {
7347 switch (bits (insn1, 8, 11))
7348 {
7349 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7350 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7351 break;
7352 case 12: case 13: /* POP */
7353 if (bit (insn1, 8)) /* PC is in register list. */
7354 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7355 else
7356 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7357 break;
7358 case 15: /* If-Then, and hints */
7359 if (bits (insn1, 0, 3))
7360 /* If-Then makes up to four following instructions conditional.
7361 The IT instruction itself is not conditional, so handle it as a
7362 common unmodified instruction. */
7363 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7364 dsc);
7365 else
7366 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7367 break;
7368 default:
7369 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7370 }
7371 }
7372 break;
7373 case 12:
7374 if (op_bit_10_11 < 2) /* Store multiple registers */
7375 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7376 else /* Load multiple registers */
7377 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7378 break;
7379 case 13: /* Conditional branch and supervisor call */
7380 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7381 err = thumb_copy_b (gdbarch, insn1, dsc);
7382 else
7383 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7384 break;
7385 case 14: /* Unconditional branch */
7386 err = thumb_copy_b (gdbarch, insn1, dsc);
7387 break;
7388 default:
7389 err = 1;
7390 }
7391
7392 if (err)
7393 internal_error (__FILE__, __LINE__,
7394 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7395 }
7396
7397 static int
7398 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7399 uint16_t insn1, uint16_t insn2,
7400 struct regcache *regs,
7401 arm_displaced_step_copy_insn_closure *dsc)
7402 {
7403 int rt = bits (insn2, 12, 15);
7404 int rn = bits (insn1, 0, 3);
7405 int op1 = bits (insn1, 7, 8);
7406
7407 switch (bits (insn1, 5, 6))
7408 {
7409 case 0: /* Load byte and memory hints */
7410 if (rt == 0xf) /* PLD/PLI */
7411 {
7412 if (rn == 0xf)
7413 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7414 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7415 else
7416 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 "pli/pld", dsc);
7418 }
7419 else
7420 {
7421 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7422 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7423 1);
7424 else
7425 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "ldrb{reg, immediate}/ldrbt",
7427 dsc);
7428 }
7429
7430 break;
7431 case 1: /* Load halfword and memory hints. */
7432 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7433 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 "pld/unalloc memhint", dsc);
7435 else
7436 {
7437 if (rn == 0xf)
7438 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7439 2);
7440 else
7441 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 "ldrh/ldrht", dsc);
7443 }
7444 break;
7445 case 2: /* Load word */
7446 {
7447 int insn2_bit_8_11 = bits (insn2, 8, 11);
7448
7449 if (rn == 0xf)
7450 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7451 else if (op1 == 0x1) /* Encoding T3 */
7452 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7453 0, 1);
7454 else /* op1 == 0x0 */
7455 {
7456 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7457 /* LDR (immediate) */
7458 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7459 dsc, bit (insn2, 8), 1);
7460 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7461 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7462 "ldrt", dsc);
7463 else
7464 /* LDR (register) */
7465 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7466 dsc, 0, 0);
7467 }
7468 break;
7469 }
7470 default:
7471 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7472 break;
7473 }
7474 return 0;
7475 }
7476
7477 static void
7478 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7479 uint16_t insn2, struct regcache *regs,
7480 arm_displaced_step_copy_insn_closure *dsc)
7481 {
7482 int err = 0;
7483 unsigned short op = bit (insn2, 15);
7484 unsigned int op1 = bits (insn1, 11, 12);
7485
7486 switch (op1)
7487 {
7488 case 1:
7489 {
7490 switch (bits (insn1, 9, 10))
7491 {
7492 case 0:
7493 if (bit (insn1, 6))
7494 {
7495 /* Load/store {dual, exclusive}, table branch. */
7496 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7497 && bits (insn2, 5, 7) == 0)
7498 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7499 dsc);
7500 else
7501 /* PC is not allowed to be used in load/store {dual, exclusive}
7502 instructions. */
7503 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7504 "load/store dual/ex", dsc);
7505 }
7506 else /* load/store multiple */
7507 {
7508 switch (bits (insn1, 7, 8))
7509 {
7510 case 0: case 3: /* SRS, RFE */
7511 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7512 "srs/rfe", dsc);
7513 break;
7514 case 1: case 2: /* LDM/STM/PUSH/POP */
7515 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7516 break;
7517 }
7518 }
7519 break;
7520
7521 case 1:
7522 /* Data-processing (shift register). */
7523 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7524 dsc);
7525 break;
7526 default: /* Coprocessor instructions. */
7527 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7528 break;
7529 }
7530 break;
7531 }
7532 case 2: /* op1 = 2 */
7533 if (op) /* Branch and misc control. */
7534 {
7535 if (bit (insn2, 14) /* BLX/BL */
7536 || bit (insn2, 12) /* Unconditional branch */
7537 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7538 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7539 else
7540 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7541 "misc ctrl", dsc);
7542 }
7543 else
7544 {
7545 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7546 {
7547 int dp_op = bits (insn1, 4, 8);
7548 int rn = bits (insn1, 0, 3);
7549 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7550 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7551 regs, dsc);
7552 else
7553 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7554 "dp/pb", dsc);
7555 }
7556 else /* Data processing (modified immediate) */
7557 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7558 "dp/mi", dsc);
7559 }
7560 break;
7561 case 3: /* op1 = 3 */
7562 switch (bits (insn1, 9, 10))
7563 {
7564 case 0:
7565 if (bit (insn1, 4))
7566 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7567 regs, dsc);
7568 else /* NEON Load/Store and Store single data item */
7569 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7570 "neon elt/struct load/store",
7571 dsc);
7572 break;
7573 case 1: /* op1 = 3, bits (9, 10) == 1 */
7574 switch (bits (insn1, 7, 8))
7575 {
7576 case 0: case 1: /* Data processing (register) */
7577 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7578 "dp(reg)", dsc);
7579 break;
7580 case 2: /* Multiply and absolute difference */
7581 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7582 "mul/mua/diff", dsc);
7583 break;
7584 case 3: /* Long multiply and divide */
7585 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7586 "lmul/lmua", dsc);
7587 break;
7588 }
7589 break;
7590 default: /* Coprocessor instructions */
7591 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7592 break;
7593 }
7594 break;
7595 default:
7596 err = 1;
7597 }
7598
7599 if (err)
7600 internal_error (__FILE__, __LINE__,
7601 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7602
7603 }
7604
7605 static void
7606 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7607 struct regcache *regs,
7608 arm_displaced_step_copy_insn_closure *dsc)
7609 {
7610 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7611 uint16_t insn1
7612 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7613
7614 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7615 insn1, (unsigned long) from);
7616
7617 dsc->is_thumb = 1;
7618 dsc->insn_size = thumb_insn_size (insn1);
7619 if (thumb_insn_size (insn1) == 4)
7620 {
7621 uint16_t insn2
7622 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7623 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7624 }
7625 else
7626 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7627 }
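/* Illustration only, not used by GDB: the architectural rule behind
   thumb_insn_size as used above.  A halfword whose bits [15:11] are
   0b11101, 0b11110 or 0b11111 starts a 32-bit Thumb instruction; any
   other halfword is a complete 16-bit instruction.  The helper name is
   invented for this example.  */
#if 0
#include <stdint.h>

static unsigned int
example_thumb_insn_size (uint16_t insn1)
{
  unsigned int op = insn1 >> 11;   /* Bits [15:11].  */

  return (op == 0x1d || op == 0x1e || op == 0x1f) ? 4 : 2;
}
#endif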
7628
7629 void
7630 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7631 CORE_ADDR to, struct regcache *regs,
7632 arm_displaced_step_copy_insn_closure *dsc)
7633 {
7634 int err = 0;
7635 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7636 uint32_t insn;
7637
7638 /* Most displaced instructions use a 1-instruction scratch space, so set this
7639 here and override below if/when necessary. */
7640 dsc->numinsns = 1;
7641 dsc->insn_addr = from;
7642 dsc->scratch_base = to;
7643 dsc->cleanup = NULL;
7644 dsc->wrote_to_pc = 0;
7645
7646 if (!displaced_in_arm_mode (regs))
7647 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7648
7649 dsc->is_thumb = 0;
7650 dsc->insn_size = 4;
7651 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7652 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7653 (unsigned long) insn, (unsigned long) from);
7654
7655 if ((insn & 0xf0000000) == 0xf0000000)
7656 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7657 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7658 {
7659 case 0x0: case 0x1: case 0x2: case 0x3:
7660 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7661 break;
7662
7663 case 0x4: case 0x5: case 0x6:
7664 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7665 break;
7666
7667 case 0x7:
7668 err = arm_decode_media (gdbarch, insn, dsc);
7669 break;
7670
7671 case 0x8: case 0x9: case 0xa: case 0xb:
7672 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7673 break;
7674
7675 case 0xc: case 0xd: case 0xe: case 0xf:
7676 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7677 break;
7678 }
7679
7680 if (err)
7681 internal_error (__FILE__, __LINE__,
7682 _("arm_process_displaced_insn: Instruction decode error"));
7683 }
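/* Illustration only, not used by GDB: how the switch key above combines
   bit 4 of an ARM instruction with its bits [27:25].  The helper name and
   the sample encoding 0xe59f1004 (ldr r1, [pc, #4]) are invented for this
   example.  */
#if 0
#include <stdint.h>
#include <stdio.h>

static void
example_arm_decode_key (uint32_t insn)
{
  unsigned int key = ((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24);

  /* For insn == 0xe59f1004, bits [27:25] are 0b010 and bit 4 is 0, so
     this prints "key=0x4", which lands in the load/store word/unsigned
     byte case above.  */
  printf ("key=0x%x\n", key);
}
#endif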
7684
7685 /* Actually set up the scratch space for a displaced instruction. */
7686
7687 void
7688 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7689 CORE_ADDR to,
7690 arm_displaced_step_copy_insn_closure *dsc)
7691 {
7692 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7693 unsigned int i, len, offset;
7694 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7695 int size = dsc->is_thumb ? 2 : 4;
7696 const gdb_byte *bkp_insn;
7697
7698 offset = 0;
7699 /* Poke modified instruction(s). */
7700 for (i = 0; i < dsc->numinsns; i++)
7701 {
7702 if (size == 4)
7703 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7704 dsc->modinsn[i], (unsigned long) to + offset);
7705 else if (size == 2)
7706 displaced_debug_printf ("writing insn %.4x at %.8lx",
7707 (unsigned short) dsc->modinsn[i],
7708 (unsigned long) to + offset);
7709
7710 write_memory_unsigned_integer (to + offset, size,
7711 byte_order_for_code,
7712 dsc->modinsn[i]);
7713 offset += size;
7714 }
7715
7716 /* Choose the correct breakpoint instruction. */
7717 if (dsc->is_thumb)
7718 {
7719 bkp_insn = tdep->thumb_breakpoint;
7720 len = tdep->thumb_breakpoint_size;
7721 }
7722 else
7723 {
7724 bkp_insn = tdep->arm_breakpoint;
7725 len = tdep->arm_breakpoint_size;
7726 }
7727
7728 /* Put breakpoint afterwards. */
7729 write_memory (to + offset, bkp_insn, len);
7730
7731 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
7732 paddress (gdbarch, to));
7733 }
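/* Illustration only, not used by GDB: the scratch area laid out above is
   just the modified instruction(s) followed by a breakpoint, so its total
   size is numinsns * insn_size + breakpoint_len bytes.  The helper name
   is invented for this example.  */
#if 0
#include <stddef.h>

static size_t
example_scratch_size (size_t numinsns, size_t insn_size, size_t bkpt_len)
{
  /* E.g. the three-instruction Thumb POP rewrite above needs
     3 * 2 + 2 == 8 bytes with a 2-byte Thumb breakpoint.  */
  return numinsns * insn_size + bkpt_len;
}
#endif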
7734
7735 /* Entry point for cleaning things up after a displaced instruction has been
7736 single-stepped. */
7737
7738 void
7739 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7740 struct displaced_step_copy_insn_closure *dsc_,
7741 CORE_ADDR from, CORE_ADDR to,
7742 struct regcache *regs)
7743 {
7744 arm_displaced_step_copy_insn_closure *dsc
7745 = (arm_displaced_step_copy_insn_closure *) dsc_;
7746
7747 if (dsc->cleanup)
7748 dsc->cleanup (gdbarch, regs, dsc);
7749
7750 if (!dsc->wrote_to_pc)
7751 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7752 dsc->insn_addr + dsc->insn_size);
7753
7754 }
7755
7756 #include "bfd-in2.h"
7757 #include "libcoff.h"
7758
7759 static int
7760 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7761 {
7762 gdb_disassembler *di
7763 = static_cast<gdb_disassembler *>(info->application_data);
7764 struct gdbarch *gdbarch = di->arch ();
7765
7766 if (arm_pc_is_thumb (gdbarch, memaddr))
7767 {
7768 static asymbol *asym;
7769 static combined_entry_type ce;
7770 static struct coff_symbol_struct csym;
7771 static struct bfd fake_bfd;
7772 static bfd_target fake_target;
7773
7774 if (csym.native == NULL)
7775 {
7776 /* Create a fake symbol vector containing a Thumb symbol.
7777 This is solely so that the code in print_insn_little_arm()
7778 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7779 the presence of a Thumb symbol and switch to decoding
7780 Thumb instructions. */
7781
7782 fake_target.flavour = bfd_target_coff_flavour;
7783 fake_bfd.xvec = &fake_target;
7784 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7785 csym.native = &ce;
7786 csym.symbol.the_bfd = &fake_bfd;
7787 csym.symbol.name = "fake";
7788 asym = (asymbol *) & csym;
7789 }
7790
7791 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7792 info->symbols = &asym;
7793 }
7794 else
7795 info->symbols = NULL;
7796
7797 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
7798 accurate; therefore set the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7799 opcodes/arm-dis.c:print_insn would reset info->mach, which would trigger
7800 the assert on the mismatch of info->mach and
7801 bfd_get_mach (current_program_space->exec_bfd ()) in
7802 default_print_insn. */
7803 if (current_program_space->exec_bfd () != NULL
7804 && (current_program_space->exec_bfd ()->arch_info
7805 == gdbarch_bfd_arch_info (gdbarch)))
7806 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7807
7808 return default_print_insn (memaddr, info);
7809 }
7810
7811 /* The following define instruction sequences that will cause ARM
7812 CPUs to take an undefined instruction trap. These are used to
7813 signal a breakpoint to GDB.
7814
7815 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7816 modes. A different instruction is required for each mode. The ARM
7817 CPUs can also be big- or little-endian. Thus four different
7818 instructions are needed to support all cases.
7819
7820 Note: ARMv4 defines several new instructions that will take the
7821 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7822 not in fact add the new instructions. The new undefined
7823 instructions in ARMv4 are all instructions that had no defined
7824 behaviour in earlier chips. There is no guarantee that they will
7825 raise an exception; they may be treated as NOPs. In practice, it
7826 may only be safe to rely on instructions matching:
7827
7828 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7829 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7830 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7831
7832 Even this may only be true if the condition predicate is true. The
7833 following use a condition predicate of ALWAYS so it is always TRUE.
7834
7835 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7836 and NetBSD all use a software interrupt rather than an undefined
7837 instruction to force a trap. This can be handled by the
7838 ABI-specific code during establishment of the gdbarch vector. */
7839
7840 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7841 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7842 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7843 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7844
7845 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7846 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7847 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7848 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
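/* Illustration only, not used by GDB: a quick check that the ARM
   breakpoint above, read as a little-endian word, matches the
   "cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx" pattern described in the
   comment, i.e. (insn & 0x0e000010) == 0x06000010.  The helper name is
   invented for this example.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
example_check_arm_breakpoint (void)
{
  uint32_t insn = 0xe7ffdefe;   /* ARM_LE_BREAKPOINT read as a word.  */

  assert ((insn & 0x0e000010) == 0x06000010);
}
#endif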
7849
7850 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7851
7852 static int
7853 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7854 {
7855 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7856 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7857
7858 if (arm_pc_is_thumb (gdbarch, *pcptr))
7859 {
7860 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7861
7862 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7863 check whether we are replacing a 32-bit instruction. */
7864 if (tdep->thumb2_breakpoint != NULL)
7865 {
7866 gdb_byte buf[2];
7867
7868 if (target_read_memory (*pcptr, buf, 2) == 0)
7869 {
7870 unsigned short inst1;
7871
7872 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7873 if (thumb_insn_size (inst1) == 4)
7874 return ARM_BP_KIND_THUMB2;
7875 }
7876 }
7877
7878 return ARM_BP_KIND_THUMB;
7879 }
7880 else
7881 return ARM_BP_KIND_ARM;
7882
7883 }
7884
7885 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7886
7887 static const gdb_byte *
7888 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7889 {
7890 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7891
7892 switch (kind)
7893 {
7894 case ARM_BP_KIND_ARM:
7895 *size = tdep->arm_breakpoint_size;
7896 return tdep->arm_breakpoint;
7897 case ARM_BP_KIND_THUMB:
7898 *size = tdep->thumb_breakpoint_size;
7899 return tdep->thumb_breakpoint;
7900 case ARM_BP_KIND_THUMB2:
7901 *size = tdep->thumb2_breakpoint_size;
7902 return tdep->thumb2_breakpoint;
7903 default:
7904 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7905 }
7906 }
7907
7908 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7909
7910 static int
7911 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7912 struct regcache *regcache,
7913 CORE_ADDR *pcptr)
7914 {
7915 gdb_byte buf[4];
7916
7917 /* Check that the memory pointed to by PC is readable. */
7918 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7919 {
7920 struct arm_get_next_pcs next_pcs_ctx;
7921
7922 arm_get_next_pcs_ctor (&next_pcs_ctx,
7923 &arm_get_next_pcs_ops,
7924 gdbarch_byte_order (gdbarch),
7925 gdbarch_byte_order_for_code (gdbarch),
7926 0,
7927 regcache);
7928
7929 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7930
7931 /* If *PCPTR is the address of one of the next instructions of the
7932 current PC (as computed by software single-step), determine the
7933 Thumb mode from that destination address. */
7934 for (CORE_ADDR pc : next_pcs)
7935 {
7936 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7937 {
7938 if (IS_THUMB_ADDR (pc))
7939 {
7940 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7941 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7942 }
7943 else
7944 return ARM_BP_KIND_ARM;
7945 }
7946 }
7947 }
7948
7949 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7950 }
7951
7952 /* Extract from an array REGBUF containing the (raw) register state a
7953 function return value of type TYPE, and copy that, in virtual
7954 format, into VALBUF. */
7955
7956 static void
7957 arm_extract_return_value (struct type *type, struct regcache *regs,
7958 gdb_byte *valbuf)
7959 {
7960 struct gdbarch *gdbarch = regs->arch ();
7961 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7962 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
7963
7964 if (TYPE_CODE_FLT == type->code ())
7965 {
7966 switch (tdep->fp_model)
7967 {
7968 case ARM_FLOAT_FPA:
7969 {
7970 /* The value is in register F0 in internal format. We need to
7971 extract the raw value and then convert it to the desired
7972 internal type. */
7973 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7974
7975 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7976 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7977 valbuf, type);
7978 }
7979 break;
7980
7981 case ARM_FLOAT_SOFT_FPA:
7982 case ARM_FLOAT_SOFT_VFP:
7983 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7984 not using the VFP ABI code. */
7985 case ARM_FLOAT_VFP:
7986 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7987 if (TYPE_LENGTH (type) > 4)
7988 regs->cooked_read (ARM_A1_REGNUM + 1,
7989 valbuf + ARM_INT_REGISTER_SIZE);
7990 break;
7991
7992 default:
7993 internal_error (__FILE__, __LINE__,
7994 _("arm_extract_return_value: "
7995 "Floating point model not supported"));
7996 break;
7997 }
7998 }
7999 else if (type->code () == TYPE_CODE_INT
8000 || type->code () == TYPE_CODE_CHAR
8001 || type->code () == TYPE_CODE_BOOL
8002 || type->code () == TYPE_CODE_PTR
8003 || TYPE_IS_REFERENCE (type)
8004 || type->code () == TYPE_CODE_ENUM
8005 || is_fixed_point_type (type))
8006 {
8007 /* If the type is a plain integer, then the access is
8008 straightforward. Otherwise we have to play around a bit
8009 more. */
8010 int len = TYPE_LENGTH (type);
8011 int regno = ARM_A1_REGNUM;
8012 ULONGEST tmp;
8013
8014 while (len > 0)
8015 {
8016 /* By using store_unsigned_integer we avoid having to do
8017 anything special for small big-endian values. */
8018 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8019 store_unsigned_integer (valbuf,
8020 (len > ARM_INT_REGISTER_SIZE
8021 ? ARM_INT_REGISTER_SIZE : len),
8022 byte_order, tmp);
8023 len -= ARM_INT_REGISTER_SIZE;
8024 valbuf += ARM_INT_REGISTER_SIZE;
8025 }
8026 }
8027 else
8028 {
8029 /* For a structure or union the behaviour is as if the value had
8030 been stored to word-aligned memory and then loaded into
8031 registers with 32-bit load instruction(s). */
8032 int len = TYPE_LENGTH (type);
8033 int regno = ARM_A1_REGNUM;
8034 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8035
8036 while (len > 0)
8037 {
8038 regs->cooked_read (regno++, tmpbuf);
8039 memcpy (valbuf, tmpbuf,
8040 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8041 len -= ARM_INT_REGISTER_SIZE;
8042 valbuf += ARM_INT_REGISTER_SIZE;
8043 }
8044 }
8045 }
8046
8047
8048 /* Will a function return an aggregate type in memory or in a
8049 register? Return 0 if an aggregate type can be returned in a
8050 register, 1 if it must be returned in memory. */
8051
8052 static int
8053 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8054 {
8055 enum type_code code;
8056
8057 type = check_typedef (type);
8058
8059 /* Simple, non-aggregate types (i.e. not including vectors and
8060 complex) are always returned in a register (or registers). */
8061 code = type->code ();
8062 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8063 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8064 return 0;
8065
8066 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8067 {
8068 /* Vector values should be returned using ARM registers if they
8069 are not over 16 bytes. */
8070 return (TYPE_LENGTH (type) > 16);
8071 }
8072
8073 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8074 if (tdep->arm_abi != ARM_ABI_APCS)
8075 {
8076 /* The AAPCS says all aggregates not larger than a word are returned
8077 in a register. */
8078 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8079 return 0;
8080
8081 return 1;
8082 }
8083 else
8084 {
8085 int nRc;
8086
8087 /* All aggregate types that won't fit in a register must be returned
8088 in memory. */
8089 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8090 return 1;
8091
8092 /* In the ARM ABI, "integer" like aggregate types are returned in
8093 registers. For an aggregate type to be integer like, its size
8094 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8095 offset of each addressable subfield must be zero. Note that bit
8096 fields are not addressable, and all addressable subfields of
8097 unions always start at offset zero.
8098
8099 This function is based on the behaviour of GCC 2.95.1.
8100 See: gcc/arm.c: arm_return_in_memory() for details.
8101
8102 Note: All versions of GCC before GCC 2.95.2 do not set up the
8103 parameters correctly for a function returning the following
8104 structure: struct { float f;}; This should be returned in memory,
8105 not a register. Richard Earnshaw sent me a patch, but I do not
8106 know of any way to detect if a function like the above has been
8107 compiled with the correct calling convention. */
8108
8109 /* Assume all other aggregate types can be returned in a register.
8110 Run a check for structures, unions and arrays. */
8111 nRc = 0;
8112
8113 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8114 {
8115 int i;
8116 /* Need to check if this struct/union is "integer" like. For
8117 this to be true, its size must be less than or equal to
8118 ARM_INT_REGISTER_SIZE and the offset of each addressable
8119 subfield must be zero. Note that bit fields are not
8120 addressable, and unions always start at offset zero. If any
8121 of the subfields is a floating point type, the struct/union
8122 cannot be an integer type. */
8123
8124 /* For each field in the object, check:
8125 1) Is it FP? --> yes, nRc = 1;
8126 2) Is it addressable (bitpos != 0) and
8127 not packed (bitsize == 0)?
8128 --> yes, nRc = 1
8129 */
8130
8131 for (i = 0; i < type->num_fields (); i++)
8132 {
8133 enum type_code field_type_code;
8134
8135 field_type_code
8136 = check_typedef (type->field (i).type ())->code ();
8137
8138 /* Is it a floating point type field? */
8139 if (field_type_code == TYPE_CODE_FLT)
8140 {
8141 nRc = 1;
8142 break;
8143 }
8144
8145 /* If bitpos != 0, then we have to care about it. */
8146 if (type->field (i).loc_bitpos () != 0)
8147 {
8148 /* Bitfields are not addressable. If the field bitsize is
8149 zero, then the field is not packed. Hence it cannot be
8150 a bitfield or any other packed type. */
8151 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8152 {
8153 nRc = 1;
8154 break;
8155 }
8156 }
8157 }
8158 }
8159
8160 return nRc;
8161 }
8162 }
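/* Illustration only, not used by GDB: under the APCS rules implemented
   above, a word-sized aggregate whose fields are all integral is
   "integer like" and is returned in r0, while a word-sized aggregate
   containing a floating point field is not and is returned in memory.
   The struct names are invented for this example.  */
#if 0
struct returned_in_r0     { int i; };    /* nRc stays 0.  */
struct returned_in_memory { float f; };  /* nRc becomes 1 (FP field).  */
#endif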
8163
8164 /* Write into appropriate registers a function return value of type
8165 TYPE, given in virtual format. */
8166
8167 static void
8168 arm_store_return_value (struct type *type, struct regcache *regs,
8169 const gdb_byte *valbuf)
8170 {
8171 struct gdbarch *gdbarch = regs->arch ();
8172 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8173
8174 if (type->code () == TYPE_CODE_FLT)
8175 {
8176 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8177 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8178
8179 switch (tdep->fp_model)
8180 {
8181 case ARM_FLOAT_FPA:
8182
8183 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8184 regs->cooked_write (ARM_F0_REGNUM, buf);
8185 break;
8186
8187 case ARM_FLOAT_SOFT_FPA:
8188 case ARM_FLOAT_SOFT_VFP:
8189 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8190 not using the VFP ABI code. */
8191 case ARM_FLOAT_VFP:
8192 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8193 if (TYPE_LENGTH (type) > 4)
8194 regs->cooked_write (ARM_A1_REGNUM + 1,
8195 valbuf + ARM_INT_REGISTER_SIZE);
8196 break;
8197
8198 default:
8199 internal_error (__FILE__, __LINE__,
8200 _("arm_store_return_value: Floating "
8201 "point model not supported"));
8202 break;
8203 }
8204 }
8205 else if (type->code () == TYPE_CODE_INT
8206 || type->code () == TYPE_CODE_CHAR
8207 || type->code () == TYPE_CODE_BOOL
8208 || type->code () == TYPE_CODE_PTR
8209 || TYPE_IS_REFERENCE (type)
8210 || type->code () == TYPE_CODE_ENUM)
8211 {
8212 if (TYPE_LENGTH (type) <= 4)
8213 {
8214 /* Values of one word or less are zero/sign-extended and
8215 returned in r0. */
8216 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8217 LONGEST val = unpack_long (type, valbuf);
8218
8219 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8220 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8221 }
8222 else
8223 {
8224 /* Integral values greater than one word are stored in consecutive
8225 registers starting with r0. This will always be a multiple of
8226 the register size. */
8227 int len = TYPE_LENGTH (type);
8228 int regno = ARM_A1_REGNUM;
8229
8230 while (len > 0)
8231 {
8232 regs->cooked_write (regno++, valbuf);
8233 len -= ARM_INT_REGISTER_SIZE;
8234 valbuf += ARM_INT_REGISTER_SIZE;
8235 }
8236 }
8237 }
8238 else
8239 {
8240 /* For a structure or union the behaviour is as if the value had
8241 been stored to word-aligned memory and then loaded into
8242 registers with 32-bit load instruction(s). */
8243 int len = TYPE_LENGTH (type);
8244 int regno = ARM_A1_REGNUM;
8245 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8246
8247 while (len > 0)
8248 {
8249 memcpy (tmpbuf, valbuf,
8250 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8251 regs->cooked_write (regno++, tmpbuf);
8252 len -= ARM_INT_REGISTER_SIZE;
8253 valbuf += ARM_INT_REGISTER_SIZE;
8254 }
8255 }
8256 }
8257
8258
8259 /* Handle function return values. */
8260
8261 static enum return_value_convention
8262 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8263 struct type *valtype, struct regcache *regcache,
8264 gdb_byte *readbuf, const gdb_byte *writebuf)
8265 {
8266 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8267 struct type *func_type = function ? value_type (function) : NULL;
8268 enum arm_vfp_cprc_base_type vfp_base_type;
8269 int vfp_base_count;
8270
8271 if (arm_vfp_abi_for_function (gdbarch, func_type)
8272 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8273 {
8274 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8275 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8276 int i;
8277 for (i = 0; i < vfp_base_count; i++)
8278 {
8279 if (reg_char == 'q')
8280 {
8281 if (writebuf)
8282 arm_neon_quad_write (gdbarch, regcache, i,
8283 writebuf + i * unit_length);
8284
8285 if (readbuf)
8286 arm_neon_quad_read (gdbarch, regcache, i,
8287 readbuf + i * unit_length);
8288 }
8289 else
8290 {
8291 char name_buf[4];
8292 int regnum;
8293
8294 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8295 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8296 strlen (name_buf));
8297 if (writebuf)
8298 regcache->cooked_write (regnum, writebuf + i * unit_length);
8299 if (readbuf)
8300 regcache->cooked_read (regnum, readbuf + i * unit_length);
8301 }
8302 }
8303 return RETURN_VALUE_REGISTER_CONVENTION;
8304 }
8305
8306 if (valtype->code () == TYPE_CODE_STRUCT
8307 || valtype->code () == TYPE_CODE_UNION
8308 || valtype->code () == TYPE_CODE_ARRAY)
8309 {
8310 if (tdep->struct_return == pcc_struct_return
8311 || arm_return_in_memory (gdbarch, valtype))
8312 return RETURN_VALUE_STRUCT_CONVENTION;
8313 }
8314 else if (valtype->code () == TYPE_CODE_COMPLEX)
8315 {
8316 if (arm_return_in_memory (gdbarch, valtype))
8317 return RETURN_VALUE_STRUCT_CONVENTION;
8318 }
8319
8320 if (writebuf)
8321 arm_store_return_value (valtype, regcache, writebuf);
8322
8323 if (readbuf)
8324 arm_extract_return_value (valtype, regcache, readbuf);
8325
8326 return RETURN_VALUE_REGISTER_CONVENTION;
8327 }
8328
8329
8330 static int
8331 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8332 {
8333 struct gdbarch *gdbarch = get_frame_arch (frame);
8334 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8335 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8336 CORE_ADDR jb_addr;
8337 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8338
8339 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8340
8341 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8342 ARM_INT_REGISTER_SIZE))
8343 return 0;
8344
8345 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8346 return 1;
8347 }
8348 /* A call to the CMSE secure entry function "foo" at "a" is modified by
8349 GNU ld into "b".
8350 a) bl xxxx <foo>
8351
8352 <foo>
8353 xxxx:
8354
8355 b) bl yyyy <__acle_se_foo>
8356
8357 section .gnu.sgstubs:
8358 <foo>
8359 yyyy: sg // secure gateway
8360 b.w xxxx <__acle_se_foo> // original_branch_dest
8361
8362 <__acle_se_foo>
8363 xxxx:
8364
8365 When control is at "b", the PC contains "yyyy" (the sg address), which is a
8366 trampoline and does not exist in the source code. This function returns the
8367 target PC "xxxx". For more details please refer to section 5.4
8368 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8369 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8370 document on www.developer.arm.com. */
8371
8372 static CORE_ADDR
8373 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8374 {
8375 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8376 char *target_name = (char *) alloca (target_len);
8377 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8378
8379 struct bound_minimal_symbol minsym
8380 = lookup_minimal_symbol (target_name, NULL, objfile);
8381
8382 if (minsym.minsym != nullptr)
8383 return BMSYMBOL_VALUE_ADDRESS (minsym);
8384
8385 return 0;
8386 }
8387
8388 /* Return true when SEC points to ".gnu.sgstubs" section. */
8389
8390 static bool
8391 arm_is_sgstubs_section (struct obj_section *sec)
8392 {
8393 return (sec != nullptr
8394 && sec->the_bfd_section != nullptr
8395 && sec->the_bfd_section->name != nullptr
8396 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8397 }
8398
8399 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8400 return the target PC. Otherwise return 0. */
8401
8402 CORE_ADDR
8403 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8404 {
8405 const char *name;
8406 int namelen;
8407 CORE_ADDR start_addr;
8408
8409 /* Find the starting address and name of the function containing the PC. */
8410 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8411 {
8412 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8413 check here. */
8414 start_addr = arm_skip_bx_reg (frame, pc);
8415 if (start_addr != 0)
8416 return start_addr;
8417
8418 return 0;
8419 }
8420
8421 /* If PC is in a Thumb call or return stub, return the address of the
8422 target PC, which is in a register. The thunk functions are called
8423 _call_via_xx, where xx is the register name. The possible names
8424 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8425 functions, named __ARM_call_via_r[0-7]. */
8426 if (startswith (name, "_call_via_")
8427 || startswith (name, "__ARM_call_via_"))
8428 {
8429 /* Use the name suffix to determine which register contains the
8430 target PC. */
8431 static const char *table[15] =
8432 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8433 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8434 };
8435 int regno;
8436 int offset = strlen (name) - 2;
8437
8438 for (regno = 0; regno <= 14; regno++)
8439 if (strcmp (&name[offset], table[regno]) == 0)
8440 return get_frame_register_unsigned (frame, regno);
8441 }
8442
8443 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8444 non-interworking calls to foo. We could decode the stubs
8445 to find the target but it's easier to use the symbol table. */
8446 namelen = strlen (name);
8447 if (name[0] == '_' && name[1] == '_'
8448 && ((namelen > 2 + strlen ("_from_thumb")
8449 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8450 || (namelen > 2 + strlen ("_from_arm")
8451 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8452 {
8453 char *target_name;
8454 int target_len = namelen - 2;
8455 struct bound_minimal_symbol minsym;
8456 struct objfile *objfile;
8457 struct obj_section *sec;
8458
8459 if (name[namelen - 1] == 'b')
8460 target_len -= strlen ("_from_thumb");
8461 else
8462 target_len -= strlen ("_from_arm");
8463
8464 target_name = (char *) alloca (target_len + 1);
8465 memcpy (target_name, name + 2, target_len);
8466 target_name[target_len] = '\0';
8467
8468 sec = find_pc_section (pc);
8469 objfile = (sec == NULL) ? NULL : sec->objfile;
8470 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8471 if (minsym.minsym != NULL)
8472 return BMSYMBOL_VALUE_ADDRESS (minsym);
8473 else
8474 return 0;
8475 }
8476
8477 struct obj_section *section = find_pc_section (pc);
8478
8479 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8480 if (arm_is_sgstubs_section (section))
8481 return arm_skip_cmse_entry (pc, name, section->objfile);
8482
8483 return 0; /* not a stub */
8484 }
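/* Illustration only, not used by GDB: the interworking thunk suffix
   lookup above, in stand-alone form.  The helper name is invented for
   this example.  */
#if 0
#include <string.h>

static int
example_call_via_regno (const char *name)   /* e.g. "_call_via_r3"  */
{
  static const char *table[15] =
    { "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
      "r8", "r9", "sl", "fp", "ip", "sp", "lr" };
  const char *suffix = name + strlen (name) - 2;
  int regno;

  for (regno = 0; regno <= 14; regno++)
    if (strcmp (suffix, table[regno]) == 0)
      return regno;                          /* 3 for "_call_via_r3".  */

  return -1;
}
#endif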
8485
8486 static void
8487 arm_update_current_architecture (void)
8488 {
8489 /* If the current architecture is not ARM, we have nothing to do. */
8490 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8491 return;
8492
8493 /* Update the architecture. */
8494 gdbarch_info info;
8495 if (!gdbarch_update_p (info))
8496 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8497 }
8498
8499 static void
8500 set_fp_model_sfunc (const char *args, int from_tty,
8501 struct cmd_list_element *c)
8502 {
8503 int fp_model;
8504
8505 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8506 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8507 {
8508 arm_fp_model = (enum arm_float_model) fp_model;
8509 break;
8510 }
8511
8512 if (fp_model == ARM_FLOAT_LAST)
8513 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8514 current_fp_model);
8515
8516 arm_update_current_architecture ();
8517 }
8518
8519 static void
8520 show_fp_model (struct ui_file *file, int from_tty,
8521 struct cmd_list_element *c, const char *value)
8522 {
8523 arm_gdbarch_tdep *tdep
8524 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
8525
8526 if (arm_fp_model == ARM_FLOAT_AUTO
8527 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8528 fprintf_filtered (file, _("\
8529 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8530 fp_model_strings[tdep->fp_model]);
8531 else
8532 fprintf_filtered (file, _("\
8533 The current ARM floating point model is \"%s\".\n"),
8534 fp_model_strings[arm_fp_model]);
8535 }
8536
8537 static void
8538 arm_set_abi (const char *args, int from_tty,
8539 struct cmd_list_element *c)
8540 {
8541 int arm_abi;
8542
8543 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8544 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8545 {
8546 arm_abi_global = (enum arm_abi_kind) arm_abi;
8547 break;
8548 }
8549
8550 if (arm_abi == ARM_ABI_LAST)
8551 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8552 arm_abi_string);
8553
8554 arm_update_current_architecture ();
8555 }
8556
8557 static void
8558 arm_show_abi (struct ui_file *file, int from_tty,
8559 struct cmd_list_element *c, const char *value)
8560 {
8561 arm_gdbarch_tdep *tdep
8562 = (arm_gdbarch_tdep *) gdbarch_tdep (target_gdbarch ());
8563
8564 if (arm_abi_global == ARM_ABI_AUTO
8565 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8566 fprintf_filtered (file, _("\
8567 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8568 arm_abi_strings[tdep->arm_abi]);
8569 else
8570 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8571 arm_abi_string);
8572 }
8573
8574 static void
8575 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8576 struct cmd_list_element *c, const char *value)
8577 {
8578 fprintf_filtered (file,
8579 _("The current execution mode assumed "
8580 "(when symbols are unavailable) is \"%s\".\n"),
8581 arm_fallback_mode_string);
8582 }
8583
8584 static void
8585 arm_show_force_mode (struct ui_file *file, int from_tty,
8586 struct cmd_list_element *c, const char *value)
8587 {
8588 fprintf_filtered (file,
8589 _("The current execution mode assumed "
8590 "(even when symbols are available) is \"%s\".\n"),
8591 arm_force_mode_string);
8592 }
8593
8594 /* If the user changes the register disassembly style used for info
8595 register and other commands, we have to also switch the style used
8596 in opcodes for disassembly output. This function is run in the "set
8597 arm disassembly" command, and does just that. */
8598
8599 static void
8600 set_disassembly_style_sfunc (const char *args, int from_tty,
8601 struct cmd_list_element *c)
8602 {
8603 /* Convert the short style name into the long style name (eg, reg-names-*)
8604 before calling the generic set_disassembler_options() function. */
8605 std::string long_name = std::string ("reg-names-") + disassembly_style;
8606 set_disassembler_options (&long_name[0]);
8607 }
8608
8609 static void
8610 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8611 struct cmd_list_element *c, const char *value)
8612 {
8613 struct gdbarch *gdbarch = get_current_arch ();
8614 char *options = get_disassembler_options (gdbarch);
8615 const char *style = "";
8616 int len = 0;
8617 const char *opt;
8618
8619 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8620 if (startswith (opt, "reg-names-"))
8621 {
8622 style = &opt[strlen ("reg-names-")];
8623 len = strcspn (style, ",");
8624 }
8625
8626 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8627 }
8628 \f
8629 /* Return the ARM register name corresponding to register I. */
8630 static const char *
8631 arm_register_name (struct gdbarch *gdbarch, int i)
8632 {
8633 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8634
8635 if (is_s_pseudo (gdbarch, i))
8636 {
8637 static const char *const s_pseudo_names[] = {
8638 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8639 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8640 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8641 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8642 };
8643
8644 return s_pseudo_names[i - tdep->s_pseudo_base];
8645 }
8646
8647 if (is_q_pseudo (gdbarch, i))
8648 {
8649 static const char *const q_pseudo_names[] = {
8650 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8651 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8652 };
8653
8654 return q_pseudo_names[i - tdep->q_pseudo_base];
8655 }
8656
8657 if (is_mve_pseudo (gdbarch, i))
8658 return "p0";
8659
8660 if (i >= ARRAY_SIZE (arm_register_names))
8661 /* These registers are only supported on targets which supply
8662 an XML description. */
8663 return "";
8664
8665 /* Non-pseudo registers. */
8666 return arm_register_names[i];
8667 }
8668
8669 /* Test whether the coff symbol specific value corresponds to a Thumb
8670 function. */
8671
8672 static int
8673 coff_sym_is_thumb (int val)
8674 {
8675 return (val == C_THUMBEXT
8676 || val == C_THUMBSTAT
8677 || val == C_THUMBEXTFUNC
8678 || val == C_THUMBSTATFUNC
8679 || val == C_THUMBLABEL);
8680 }
8681
8682 /* arm_coff_make_msymbol_special()
8683 arm_elf_make_msymbol_special()
8684
8685 These functions test whether the COFF or ELF symbol corresponds to
8686 an address in thumb code, and set a "special" bit in a minimal
8687 symbol to indicate that it does. */
8688
8689 static void
8690 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8691 {
8692 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8693
8694 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8695 == ST_BRANCH_TO_THUMB)
8696 MSYMBOL_SET_SPECIAL (msym);
8697 }
8698
8699 static void
8700 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8701 {
8702 if (coff_sym_is_thumb (val))
8703 MSYMBOL_SET_SPECIAL (msym);
8704 }
8705
8706 static void
8707 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8708 asymbol *sym)
8709 {
8710 const char *name = bfd_asymbol_name (sym);
8711 struct arm_per_bfd *data;
8712 struct arm_mapping_symbol new_map_sym;
8713
8714 gdb_assert (name[0] == '$');
8715 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8716 return;
8717
8718 data = arm_bfd_data_key.get (objfile->obfd);
8719 if (data == NULL)
8720 data = arm_bfd_data_key.emplace (objfile->obfd,
8721 objfile->obfd->section_count);
8722 arm_mapping_symbol_vec &map
8723 = data->section_maps[bfd_asymbol_section (sym)->index];
8724
8725 new_map_sym.value = sym->value;
8726 new_map_sym.type = name[1];
8727
8728 /* Insert at the end, the vector will be sorted on first use. */
8729 map.push_back (new_map_sym);
8730 }
8731
8732 static void
8733 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8734 {
8735 struct gdbarch *gdbarch = regcache->arch ();
8736 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8737
8738 /* If necessary, set the T bit. */
8739 if (arm_apcs_32)
8740 {
8741 ULONGEST val, t_bit;
8742 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8743 t_bit = arm_psr_thumb_bit (gdbarch);
8744 if (arm_pc_is_thumb (gdbarch, pc))
8745 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8746 val | t_bit);
8747 else
8748 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8749 val & ~t_bit);
8750 }
8751 }
8752
8753 /* Read the contents of a NEON quad register, by reading from two
8754 double registers. This is used to implement the quad pseudo
8755 registers, and for argument passing in case the quad registers are
8756 missing; vectors are passed in quad registers when using the VFP
8757 ABI, even if a NEON unit is not present. REGNUM is the index of
8758 the quad register, in [0, 15]. */
8759
8760 static enum register_status
8761 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8762 int regnum, gdb_byte *buf)
8763 {
8764 char name_buf[4];
8765 gdb_byte reg_buf[8];
8766 int offset, double_regnum;
8767 enum register_status status;
8768
8769 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8770 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8771 strlen (name_buf));
8772
8773 /* d0 is always the least significant half of q0. */
8774 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8775 offset = 8;
8776 else
8777 offset = 0;
8778
8779 status = regcache->raw_read (double_regnum, reg_buf);
8780 if (status != REG_VALID)
8781 return status;
8782 memcpy (buf + offset, reg_buf, 8);
8783
8784 offset = 8 - offset;
8785 status = regcache->raw_read (double_regnum + 1, reg_buf);
8786 if (status != REG_VALID)
8787 return status;
8788 memcpy (buf + offset, reg_buf, 8);
8789
8790 return REG_VALID;
8791 }
8792
8793 /* Read the contents of the MVE pseudo register REGNUM and store it
8794 in BUF. */
8795
8796 static enum register_status
8797 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8798 int regnum, gdb_byte *buf)
8799 {
8800 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8801
8802 /* P0 is the first 16 bits of VPR. */
8803 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
8804 }
8805
8806 static enum register_status
8807 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8808 int regnum, gdb_byte *buf)
8809 {
8810 const int num_regs = gdbarch_num_regs (gdbarch);
8811 char name_buf[4];
8812 gdb_byte reg_buf[8];
8813 int offset, double_regnum;
8814 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8815
8816 gdb_assert (regnum >= num_regs);
8817
8818 if (is_q_pseudo (gdbarch, regnum))
8819 {
8820 /* Quad-precision register. */
8821 return arm_neon_quad_read (gdbarch, regcache,
8822 regnum - tdep->q_pseudo_base, buf);
8823 }
8824 else if (is_mve_pseudo (gdbarch, regnum))
8825 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
8826 else
8827 {
8828 enum register_status status;
8829
8830 regnum -= tdep->s_pseudo_base;
8831 /* Single-precision register. */
8832 gdb_assert (regnum < 32);
8833
8834 /* s0 is always the least significant half of d0. */
8835 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8836 offset = (regnum & 1) ? 0 : 4;
8837 else
8838 offset = (regnum & 1) ? 4 : 0;
8839
8840 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8841 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8842 strlen (name_buf));
8843
8844 status = regcache->raw_read (double_regnum, reg_buf);
8845 if (status == REG_VALID)
8846 memcpy (buf, reg_buf + offset, 4);
8847 return status;
8848 }
8849 }
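/* Illustration only, not used by GDB: the single-precision pseudo
   mapping used above.  Pseudo register sN maps onto dN/2; on a
   little-endian target an odd-numbered sN lives at byte offset 4 of
   that D register's raw buffer and an even-numbered one at offset 0,
   reversed on big-endian.  The helper name is invented for this
   example.  */
#if 0
static void
example_s_reg_location (int sreg, int big_endian,
                        int *dreg, int *byte_offset)
{
  *dreg = sreg >> 1;                          /* e.g. s5 -> d2.  */
  if (big_endian)
    *byte_offset = (sreg & 1) ? 0 : 4;
  else
    *byte_offset = (sreg & 1) ? 4 : 0;        /* e.g. s5 -> offset 4.  */
}
#endif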
8850
8851 /* Store the contents of BUF to a NEON quad register, by writing to
8852 two double registers. This is used to implement the quad pseudo
8853 registers, and for argument passing in case the quad registers are
8854 missing; vectors are passed in quad registers when using the VFP
8855 ABI, even if a NEON unit is not present. REGNUM is the index
8856 of the quad register, in [0, 15]. */
8857
8858 static void
8859 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8860 int regnum, const gdb_byte *buf)
8861 {
8862 char name_buf[4];
8863 int offset, double_regnum;
8864
8865 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8866 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8867 strlen (name_buf));
8868
8869 /* d0 is always the least significant half of q0. */
8870 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8871 offset = 8;
8872 else
8873 offset = 0;
8874
8875 regcache->raw_write (double_regnum, buf + offset);
8876 offset = 8 - offset;
8877 regcache->raw_write (double_regnum + 1, buf + offset);
8878 }
8879
8880 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
8881
8882 static void
8883 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8884 int regnum, const gdb_byte *buf)
8885 {
8886 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8887
8888 /* P0 is the first 16 bits of VPR. */
8889 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
8890 }
8891
8892 static void
8893 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8894 int regnum, const gdb_byte *buf)
8895 {
8896 const int num_regs = gdbarch_num_regs (gdbarch);
8897 char name_buf[4];
8898 gdb_byte reg_buf[8];
8899 int offset, double_regnum;
8900 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8901
8902 gdb_assert (regnum >= num_regs);
8903
8904 if (is_q_pseudo (gdbarch, regnum))
8905 {
8906 /* Quad-precision register. */
8907 arm_neon_quad_write (gdbarch, regcache,
8908 regnum - tdep->q_pseudo_base, buf);
8909 }
8910 else if (is_mve_pseudo (gdbarch, regnum))
8911 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
8912 else
8913 {
8914 regnum -= tdep->s_pseudo_base;
8915 /* Single-precision register. */
8916 gdb_assert (regnum < 32);
8917
8918 /* s0 is always the least significant half of d0. */
8919 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8920 offset = (regnum & 1) ? 0 : 4;
8921 else
8922 offset = (regnum & 1) ? 4 : 0;
8923
8924 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8925 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8926 strlen (name_buf));
8927
8928 regcache->raw_read (double_regnum, reg_buf);
8929 memcpy (reg_buf + offset, buf, 4);
8930 regcache->raw_write (double_regnum, reg_buf);
8931 }
8932 }
8933
8934 static struct value *
8935 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8936 {
8937 const int *reg_p = (const int *) baton;
8938 return value_of_register (*reg_p, frame);
8939 }
8940 \f
8941 static enum gdb_osabi
8942 arm_elf_osabi_sniffer (bfd *abfd)
8943 {
8944 unsigned int elfosabi;
8945 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8946
8947 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8948
8949 if (elfosabi == ELFOSABI_ARM)
8950 /* GNU tools use this value. Check note sections in this case,
8951 as well. */
8952 {
8953 for (asection *sect : gdb_bfd_sections (abfd))
8954 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8955 }
8956
8957 /* Anything else will be handled by the generic ELF sniffer. */
8958 return osabi;
8959 }
8960
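/* Implement the register_reggroup_p gdbarch method.  */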
8961 static int
8962 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8963 struct reggroup *group)
8964 {
8965 /* The FPS register's type is INT, but it belongs to float_reggroup.  Besides
8966 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8967 all_reggroup, of course. */
8968 if (regnum == ARM_FPS_REGNUM)
8969 return (group == float_reggroup
8970 || group == save_reggroup
8971 || group == restore_reggroup
8972 || group == all_reggroup);
8973 else
8974 return default_register_reggroup_p (gdbarch, regnum, group);
8975 }
8976
8977 /* For backward-compatibility we allow two 'g' packet lengths with
8978 the remote protocol depending on whether FPA registers are
8979 supplied. M-profile targets do not have FPA registers, but some
8980 stubs already exist in the wild which use a 'g' packet which
8981 supplies them albeit with dummy values. The packet format which
8982 includes FPA registers should be considered deprecated for
8983 M-profile targets. */
8984
8985 static void
8986 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8987 {
8988 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
8989
8990 if (tdep->is_m)
8991 {
8992 const target_desc *tdesc;
8993
8994 /* If we know from the executable this is an M-profile target,
8995 cater for remote targets whose register set layout is the
8996 same as the FPA layout. */
8997 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8998 register_remote_g_packet_guess (gdbarch,
8999 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9000 tdesc);
9001
9002 /* The regular M-profile layout. */
9003 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9004 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9005 tdesc);
9006
9007 /* M-profile plus M4F VFP. */
9008 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9009 register_remote_g_packet_guess (gdbarch,
9010 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9011 tdesc);
9012 /* M-profile plus MVE. */
9013 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9014 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9015 + ARM_VFP2_REGS_SIZE
9016 + ARM_INT_REGISTER_SIZE, tdesc);
9017 }
9018
9019 /* Otherwise we don't have a useful guess. */
9020 }
9021
9022 /* Implement the code_of_frame_writable gdbarch method. */
9023
9024 static int
9025 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
9026 {
9027 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9028
9029 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
9030 {
9031 /* M-profile exception frames return to some magic PCs, which
9032 aren't writable at all. */
9033 return 0;
9034 }
9035 else
9036 return 1;
9037 }
9038
9039 /* Implement gdbarch_gnu_triplet_regexp.  If the arch name is arm, allow it
9040 to be postfixed by a version (e.g. armv7hl). */
9041
9042 static const char *
9043 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
9044 {
9045 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
9046 return "arm(v[^- ]*)?";
9047 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
9048 }
9049
9050 /* Initialize the current architecture based on INFO. If possible,
9051 re-use an architecture from ARCHES, which is a list of
9052 architectures already created during this debugging session.
9053
9054 Called e.g. at program startup, when reading a core file, and when
9055 reading a binary file. */
9056
9057 static struct gdbarch *
9058 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9059 {
9060 struct gdbarch *gdbarch;
9061 struct gdbarch_list *best_arch;
9062 enum arm_abi_kind arm_abi = arm_abi_global;
9063 enum arm_float_model fp_model = arm_fp_model;
9064 tdesc_arch_data_up tdesc_data;
9065 int i;
9066 bool is_m = false;
9067 int vfp_register_count = 0;
9068 bool have_s_pseudos = false, have_q_pseudos = false;
9069 bool have_wmmx_registers = false;
9070 bool have_neon = false;
9071 bool have_fpa_registers = true;
9072 const struct target_desc *tdesc = info.target_desc;
9073 bool have_vfp = false;
9074 bool have_mve = false;
9075 int mve_vpr_regnum = -1;
9076 int register_count = ARM_NUM_REGS;
9077
9078 /* If we have an object to base this architecture on, try to determine
9079 its ABI. */
9080
9081 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9082 {
9083 int ei_osabi, e_flags;
9084
9085 switch (bfd_get_flavour (info.abfd))
9086 {
9087 case bfd_target_coff_flavour:
9088 /* Assume it's an old APCS-style ABI. */
9089 /* XXX WinCE? */
9090 arm_abi = ARM_ABI_APCS;
9091 break;
9092
9093 case bfd_target_elf_flavour:
9094 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9095 e_flags = elf_elfheader (info.abfd)->e_flags;
9096
9097 if (ei_osabi == ELFOSABI_ARM)
9098 {
9099 /* GNU tools used to use this value, but do not for EABI
9100 objects. There's nowhere to tag an EABI version
9101 anyway, so assume APCS. */
9102 arm_abi = ARM_ABI_APCS;
9103 }
9104 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9105 {
9106 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9107
9108 switch (eabi_ver)
9109 {
9110 case EF_ARM_EABI_UNKNOWN:
9111 /* Assume GNU tools. */
9112 arm_abi = ARM_ABI_APCS;
9113 break;
9114
9115 case EF_ARM_EABI_VER4:
9116 case EF_ARM_EABI_VER5:
9117 arm_abi = ARM_ABI_AAPCS;
9118 /* EABI binaries default to VFP float ordering.
9119 They may also contain build attributes that can
9120 be used to identify if the VFP argument-passing
9121 ABI is in use. */
9122 if (fp_model == ARM_FLOAT_AUTO)
9123 {
9124 #ifdef HAVE_ELF
9125 switch (bfd_elf_get_obj_attr_int (info.abfd,
9126 OBJ_ATTR_PROC,
9127 Tag_ABI_VFP_args))
9128 {
9129 case AEABI_VFP_args_base:
9130 /* "The user intended FP parameter/result
9131 passing to conform to AAPCS, base
9132 variant". */
9133 fp_model = ARM_FLOAT_SOFT_VFP;
9134 break;
9135 case AEABI_VFP_args_vfp:
9136 /* "The user intended FP parameter/result
9137 passing to conform to AAPCS, VFP
9138 variant". */
9139 fp_model = ARM_FLOAT_VFP;
9140 break;
9141 case AEABI_VFP_args_toolchain:
9142 /* "The user intended FP parameter/result
9143 passing to conform to tool chain-specific
9144 conventions" - we don't know any such
9145 conventions, so leave it as "auto". */
9146 break;
9147 case AEABI_VFP_args_compatible:
9148 /* "Code is compatible with both the base
9149 and VFP variants; the user did not permit
9150 non-variadic functions to pass FP
9151 parameters/results" - leave it as
9152 "auto". */
9153 break;
9154 default:
9155 /* Attribute value not mentioned in the
9156 November 2012 ABI, so leave it as
9157 "auto". */
9158 break;
9159 }
9160 #else
9161 fp_model = ARM_FLOAT_SOFT_VFP;
9162 #endif
9163 }
9164 break;
9165
9166 default:
9167 /* Leave it as "auto". */
9168 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9169 break;
9170 }
9171
9172 #ifdef HAVE_ELF
9173 /* Detect M-profile programs. This only works if the
9174 executable file includes build attributes; GCC does
9175 copy them to the executable, but e.g. RealView does
9176 not. */
9177 int attr_arch
9178 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9179 Tag_CPU_arch);
9180 int attr_profile
9181 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9182 Tag_CPU_arch_profile);
9183
9184 /* GCC specifies the profile for v6-M; RealView only
9185 specifies the profile for architectures starting with
9186 V7 (as opposed to architectures with a tag
9187 numerically greater than TAG_CPU_ARCH_V7). */
9188 if (!tdesc_has_registers (tdesc)
9189 && (attr_arch == TAG_CPU_ARCH_V6_M
9190 || attr_arch == TAG_CPU_ARCH_V6S_M
9191 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
9192 || attr_profile == 'M'))
9193 is_m = true;
9194 #endif
9195 }
9196
9197 if (fp_model == ARM_FLOAT_AUTO)
9198 {
9199 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9200 {
9201 case 0:
9202 /* Leave it as "auto". Strictly speaking this case
9203 means FPA, but almost nobody uses that now, and
9204 many toolchains fail to set the appropriate bits
9205 for the floating-point model they use. */
9206 break;
9207 case EF_ARM_SOFT_FLOAT:
9208 fp_model = ARM_FLOAT_SOFT_FPA;
9209 break;
9210 case EF_ARM_VFP_FLOAT:
9211 fp_model = ARM_FLOAT_VFP;
9212 break;
9213 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9214 fp_model = ARM_FLOAT_SOFT_VFP;
9215 break;
9216 }
9217 }
9218
9219 if (e_flags & EF_ARM_BE8)
9220 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9221
9222 break;
9223
9224 default:
9225 /* Leave it as "auto". */
9226 break;
9227 }
9228 }
9229
9230 /* Check any target description for validity. */
9231 if (tdesc_has_registers (tdesc))
9232 {
9233 /* For most registers we require GDB's default names; but also allow
9234 the numeric names for sp / lr / pc, as a convenience. */
9235 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9236 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9237 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9238
9239 const struct tdesc_feature *feature;
9240 int valid_p;
9241
9242 feature = tdesc_find_feature (tdesc,
9243 "org.gnu.gdb.arm.core");
9244 if (feature == NULL)
9245 {
9246 feature = tdesc_find_feature (tdesc,
9247 "org.gnu.gdb.arm.m-profile");
9248 if (feature == NULL)
9249 return NULL;
9250 else
9251 is_m = true;
9252 }
9253
9254 tdesc_data = tdesc_data_alloc ();
9255
9256 valid_p = 1;
9257 for (i = 0; i < ARM_SP_REGNUM; i++)
9258 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9259 arm_register_names[i]);
9260 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9261 ARM_SP_REGNUM,
9262 arm_sp_names);
9263 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9264 ARM_LR_REGNUM,
9265 arm_lr_names);
9266 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9267 ARM_PC_REGNUM,
9268 arm_pc_names);
9269 if (is_m)
9270 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9271 ARM_PS_REGNUM, "xpsr");
9272 else
9273 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9274 ARM_PS_REGNUM, "cpsr");
9275
9276 if (!valid_p)
9277 return NULL;
9278
9279 feature = tdesc_find_feature (tdesc,
9280 "org.gnu.gdb.arm.fpa");
9281 if (feature != NULL)
9282 {
9283 valid_p = 1;
9284 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9285 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9286 arm_register_names[i]);
9287 if (!valid_p)
9288 return NULL;
9289 }
9290 else
9291 have_fpa_registers = false;
9292
9293 feature = tdesc_find_feature (tdesc,
9294 "org.gnu.gdb.xscale.iwmmxt");
9295 if (feature != NULL)
9296 {
9297 static const char *const iwmmxt_names[] = {
9298 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9299 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9300 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9301 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9302 };
9303
9304 valid_p = 1;
9305 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9306 valid_p
9307 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9308 iwmmxt_names[i - ARM_WR0_REGNUM]);
9309
9310 /* Check for the control registers, but do not fail if they
9311 are missing. */
9312 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9313 tdesc_numbered_register (feature, tdesc_data.get (), i,
9314 iwmmxt_names[i - ARM_WR0_REGNUM]);
9315
9316 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9317 valid_p
9318 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9319 iwmmxt_names[i - ARM_WR0_REGNUM]);
9320
9321 if (!valid_p)
9322 return NULL;
9323
9324 have_wmmx_registers = true;
9325 }
9326
9327 /* If we have a VFP unit, check whether the single precision registers
9328 are present. If not, then we will synthesize them as pseudo
9329 registers. */
9330 feature = tdesc_find_feature (tdesc,
9331 "org.gnu.gdb.arm.vfp");
9332 if (feature != NULL)
9333 {
9334 static const char *const vfp_double_names[] = {
9335 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9336 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9337 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9338 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9339 };
9340
9341 /* Require the double precision registers. There must be either
9342 16 or 32. */
9343 valid_p = 1;
9344 for (i = 0; i < 32; i++)
9345 {
9346 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9347 ARM_D0_REGNUM + i,
9348 vfp_double_names[i]);
9349 if (!valid_p)
9350 break;
9351 }
9352 if (!valid_p && i == 16)
9353 valid_p = 1;
9354
9355 /* Also require FPSCR. */
9356 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9357 ARM_FPSCR_REGNUM, "fpscr");
9358 if (!valid_p)
9359 return NULL;
9360
9361 have_vfp = true;
9362
9363 if (tdesc_unnumbered_register (feature, "s0") == 0)
9364 have_s_pseudos = true;
9365
9366 vfp_register_count = i;
9367
9368 /* If we have VFP, also check for NEON. The architecture allows
9369 NEON without VFP (integer vector operations only), but GDB
9370 does not support that. */
9371 feature = tdesc_find_feature (tdesc,
9372 "org.gnu.gdb.arm.neon");
9373 if (feature != NULL)
9374 {
9375 /* NEON requires 32 double-precision registers. */
9376 if (i != 32)
9377 return NULL;
9378
9379 /* If there are quad registers defined by the stub, use
9380 their type; otherwise (normally) provide them with
9381 the default type. */
9382 if (tdesc_unnumbered_register (feature, "q0") == 0)
9383 have_q_pseudos = true;
9384 }
9385 }
9386
9387 /* Check for MVE after all the checks for GPRs, VFP and NEON.
9388 MVE (Helium) is an M-profile extension. */
9389 if (is_m)
9390 {
9391 /* Do we have the MVE feature? */
9392 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.m-profile-mve");
9393
9394 if (feature != nullptr)
9395 {
9396 /* If we have MVE, we must always have the VPR register. */
9397 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9398 register_count, "vpr");
9399 if (!valid_p)
9400 {
9401 warning (_("MVE feature is missing required register vpr."));
9402 return nullptr;
9403 }
9404
9405 have_mve = true;
9406 mve_vpr_regnum = register_count;
9407 register_count++;
9408
9409 /* We can't have Q pseudo registers available here, as that
9410 would mean we have NEON features, and that is only available
9411 on A and R profiles. */
9412 gdb_assert (!have_q_pseudos);
9413
9414 /* Given we have an M-profile target description, if MVE is
9415 enabled and there are VFP registers, we should have Q
9416 pseudo registers (Q0 ~ Q7). */
9417 if (have_vfp)
9418 have_q_pseudos = true;
9419 }
9420 }
9421 }
9422
9423 /* If there is already a candidate, use it. */
9424 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9425 best_arch != NULL;
9426 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9427 {
9428 arm_gdbarch_tdep *tdep
9429 = (arm_gdbarch_tdep *) gdbarch_tdep (best_arch->gdbarch);
9430
9431 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
9432 continue;
9433
9434 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
9435 continue;
9436
9437 /* There are various other properties in tdep that we do not
9438 need to check here: those derived from a target description,
9439 since gdbarches with a different target description are
9440 automatically disqualified. */
9441
9442 /* Do check is_m, though, since it might come from the binary. */
9443 if (is_m != tdep->is_m)
9444 continue;
9445
9446 /* Found a match. */
9447 break;
9448 }
9449
9450 if (best_arch != NULL)
9451 return best_arch->gdbarch;
9452
9453 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep;
9454 gdbarch = gdbarch_alloc (&info, tdep);
9455
9456 /* Record additional information about the architecture we are defining.
9457 These are gdbarch discriminators, like the OSABI. */
9458 tdep->arm_abi = arm_abi;
9459 tdep->fp_model = fp_model;
9460 tdep->is_m = is_m;
9461 tdep->have_fpa_registers = have_fpa_registers;
9462 tdep->have_wmmx_registers = have_wmmx_registers;
9463 gdb_assert (vfp_register_count == 0
9464 || vfp_register_count == 16
9465 || vfp_register_count == 32);
9466 tdep->vfp_register_count = vfp_register_count;
9467 tdep->have_s_pseudos = have_s_pseudos;
9468 tdep->have_q_pseudos = have_q_pseudos;
9469 tdep->have_neon = have_neon;
9470
9471 /* Adjust the MVE feature settings. */
9472 if (have_mve)
9473 {
9474 tdep->have_mve = true;
9475 tdep->mve_vpr_regnum = mve_vpr_regnum;
9476 }
9477
9478 arm_register_g_packet_guesses (gdbarch);
9479
9480 /* Breakpoints. */
9481 switch (info.byte_order_for_code)
9482 {
9483 case BFD_ENDIAN_BIG:
9484 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9485 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9486 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9487 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9488
9489 break;
9490
9491 case BFD_ENDIAN_LITTLE:
9492 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9493 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9494 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9495 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9496
9497 break;
9498
9499 default:
9500 internal_error (__FILE__, __LINE__,
9501 _("arm_gdbarch_init: bad byte order for float format"));
9502 }
9503
9504 /* On ARM targets char defaults to unsigned. */
9505 set_gdbarch_char_signed (gdbarch, 0);
9506
9507 /* wchar_t is unsigned under the AAPCS. */
9508 if (tdep->arm_abi == ARM_ABI_AAPCS)
9509 set_gdbarch_wchar_signed (gdbarch, 0);
9510 else
9511 set_gdbarch_wchar_signed (gdbarch, 1);
9512
9513 /* Compute type alignment. */
9514 set_gdbarch_type_align (gdbarch, arm_type_align);
9515
9516 /* Note: for displaced stepping, this includes the breakpoint, and one word
9517 of additional scratch space. This setting isn't used for anything beside
9518 displaced stepping at present. */
9519 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9520
9521 /* This should be low enough for everything. */
9522 tdep->lowest_pc = 0x20;
9523 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9524
9525 /* The default, for both APCS and AAPCS, is to return small
9526 structures in registers. */
9527 tdep->struct_return = reg_struct_return;
9528
9529 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9530 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9531
9532 if (is_m)
9533 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9534
9535 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9536
9537 frame_base_set_default (gdbarch, &arm_normal_base);
9538
9539 /* Address manipulation. */
9540 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9541
9542 /* Advance PC across function entry code. */
9543 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9544
9545 /* Detect whether PC is at a point where the stack has been destroyed. */
9546 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9547
9548 /* Skip trampolines. */
9549 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9550
9551 /* The stack grows downward. */
9552 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9553
9554 /* Breakpoint manipulation. */
9555 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9556 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9557 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9558 arm_breakpoint_kind_from_current_state);
9559
9560 /* Information about registers, etc. */
9561 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9562 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9563 set_gdbarch_num_regs (gdbarch, register_count);
9564 set_gdbarch_register_type (gdbarch, arm_register_type);
9565 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9566
9567 /* This "info float" is FPA-specific. Use the generic version if we
9568 do not have FPA. */
9569 if (tdep->have_fpa_registers)
9570 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9571
9572 /* Internal <-> external register number maps. */
9573 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9574 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9575
9576 set_gdbarch_register_name (gdbarch, arm_register_name);
9577
9578 /* Returning results. */
9579 set_gdbarch_return_value (gdbarch, arm_return_value);
9580
9581 /* Disassembly. */
9582 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9583
9584 /* Minsymbol frobbing. */
9585 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9586 set_gdbarch_coff_make_msymbol_special (gdbarch,
9587 arm_coff_make_msymbol_special);
9588 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9589
9590 /* Thumb-2 IT block support. */
9591 set_gdbarch_adjust_breakpoint_address (gdbarch,
9592 arm_adjust_breakpoint_address);
9593
9594 /* Virtual tables. */
9595 set_gdbarch_vbit_in_delta (gdbarch, 1);
9596
9597 /* Hook in the ABI-specific overrides, if they have been registered. */
9598 gdbarch_init_osabi (info, gdbarch);
9599
9600 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9601
9602 /* Add some default predicates. */
9603 if (is_m)
9604 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9605 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9606 dwarf2_append_unwinders (gdbarch);
9607 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9608 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9609 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9610
9611 /* Now we have tuned the configuration, set a few final things,
9612 based on what the OS ABI has told us. */
9613
9614 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9615 binaries are always marked. */
9616 if (tdep->arm_abi == ARM_ABI_AUTO)
9617 tdep->arm_abi = ARM_ABI_APCS;
9618
9619 /* Watchpoints are not steppable. */
9620 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9621
9622 /* We used to default to FPA for generic ARM, but almost nobody
9623 uses that now, and we now provide a way for the user to force
9624 the model. So default to the most useful variant. */
9625 if (tdep->fp_model == ARM_FLOAT_AUTO)
9626 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9627
9628 if (tdep->jb_pc >= 0)
9629 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9630
9631 /* Floating point sizes and format. */
9632 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9633 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9634 {
9635 set_gdbarch_double_format
9636 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9637 set_gdbarch_long_double_format
9638 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9639 }
9640 else
9641 {
9642 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9643 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9644 }
9645
9646 if (tdesc_data != nullptr)
9647 {
9648 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9649
9650 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9651 register_count = gdbarch_num_regs (gdbarch);
9652
9653 /* Override tdesc_register_type to adjust the types of VFP
9654 registers for NEON. */
9655 set_gdbarch_register_type (gdbarch, arm_register_type);
9656 }
9657
9658 /* Initialize the pseudo register data. */
9659 int num_pseudos = 0;
9660 if (tdep->have_s_pseudos)
9661 {
9662 /* VFP single precision pseudo registers (S0~S31). */
9663 tdep->s_pseudo_base = register_count;
9664 tdep->s_pseudo_count = 32;
9665 num_pseudos += tdep->s_pseudo_count;
9666
9667 if (tdep->have_q_pseudos)
9668 {
9669 /* NEON quad precision pseudo registers (Q0~Q15). */
9670 tdep->q_pseudo_base = register_count + num_pseudos;
9671
9672 if (have_neon)
9673 tdep->q_pseudo_count = 16;
9674 else if (have_mve)
9675 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
9676
9677 num_pseudos += tdep->q_pseudo_count;
9678 }
9679 }
9680
9681 /* Do we have any MVE pseudo registers? */
9682 if (have_mve)
9683 {
9684 tdep->mve_pseudo_base = register_count + num_pseudos;
9685 tdep->mve_pseudo_count = 1;
9686 num_pseudos += tdep->mve_pseudo_count;
9687 }
9688
9689 /* Set some pseudo register hooks, if we have pseudo registers. */
9690 if (tdep->have_s_pseudos || have_mve)
9691 {
9692 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9693 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9694 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9695 }
9696
9697 /* Add standard register aliases. We add aliases even for those
9698 names which are used by the current architecture - it's simpler,
9699 and does no harm, since nothing ever lists user registers. */
9700 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9701 user_reg_add (gdbarch, arm_register_aliases[i].name,
9702 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9703
9704 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9705 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9706
9707 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9708
9709 return gdbarch;
9710 }
9711
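/* Implement the dump_tdep gdbarch routine: print the ARM-specific
   per-architecture (tdep) settings to FILE.  */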
9712 static void
9713 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9714 {
9715 arm_gdbarch_tdep *tdep = (arm_gdbarch_tdep *) gdbarch_tdep (gdbarch);
9716
9717 if (tdep == NULL)
9718 return;
9719
9720 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9721 (int) tdep->fp_model);
9722 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9723 (int) tdep->have_fpa_registers);
9724 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9725 (int) tdep->have_wmmx_registers);
9726 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9727 (int) tdep->vfp_register_count);
9728 fprintf_unfiltered (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
9729 tdep->have_s_pseudos ? "true" : "false");
9730 fprintf_unfiltered (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
9731 (int) tdep->s_pseudo_base);
9732 fprintf_unfiltered (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
9733 (int) tdep->s_pseudo_count);
9734 fprintf_unfiltered (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
9735 tdep->have_q_pseudos ? "true" : "false");
9736 fprintf_unfiltered (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
9737 (int) tdep->q_pseudo_base);
9738 fprintf_unfiltered (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
9739 (int) tdep->q_pseudo_count);
9740 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9741 (int) tdep->have_neon);
9742 fprintf_unfiltered (file, _("arm_dump_tdep: have_mve = %s\n"),
9743 tdep->have_mve ? "yes" : "no");
9744 fprintf_unfiltered (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
9745 tdep->mve_vpr_regnum);
9746 fprintf_unfiltered (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
9747 tdep->mve_pseudo_base);
9748 fprintf_unfiltered (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
9749 tdep->mve_pseudo_count);
9750 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9751 (unsigned long) tdep->lowest_pc);
9752 }
9753
9754 #if GDB_SELF_TEST
9755 namespace selftests
9756 {
9757 static void arm_record_test (void);
9758 static void arm_analyze_prologue_test ();
9759 }
9760 #endif
9761
9762 void _initialize_arm_tdep ();
9763 void
9764 _initialize_arm_tdep ()
9765 {
9766 long length;
9767 int i, j;
9768 char regdesc[1024], *rdptr = regdesc;
9769 size_t rest = sizeof (regdesc);
9770
9771 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9772
9773 /* Add ourselves to objfile event chain. */
9774 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
9775
9776 /* Register an ELF OS ABI sniffer for ARM binaries. */
9777 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9778 bfd_target_elf_flavour,
9779 arm_elf_osabi_sniffer);
9780
9781 /* Add root prefix command for all "set arm"/"show arm" commands. */
9782 add_setshow_prefix_cmd ("arm", no_class,
9783 _("Various ARM-specific commands."),
9784 _("Various ARM-specific commands."),
9785 &setarmcmdlist, &showarmcmdlist,
9786 &setlist, &showlist);
9787
9788 arm_disassembler_options = xstrdup ("reg-names-std");
9789 const disasm_options_t *disasm_options
9790 = &disassembler_options_arm ()->options;
9791 int num_disassembly_styles = 0;
9792 for (i = 0; disasm_options->name[i] != NULL; i++)
9793 if (startswith (disasm_options->name[i], "reg-names-"))
9794 num_disassembly_styles++;
9795
9796 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9797 valid_disassembly_styles = XNEWVEC (const char *,
9798 num_disassembly_styles + 1);
9799 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9800 if (startswith (disasm_options->name[i], "reg-names-"))
9801 {
9802 size_t offset = strlen ("reg-names-");
9803 const char *style = disasm_options->name[i];
9804 valid_disassembly_styles[j++] = &style[offset];
9805 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9806 disasm_options->description[i]);
9807 rdptr += length;
9808 rest -= length;
9809 }
9810 /* Mark the end of valid options. */
9811 valid_disassembly_styles[num_disassembly_styles] = NULL;
9812
9813 /* Create the help text. */
9814 std::string helptext = string_printf ("%s%s%s",
9815 _("The valid values are:\n"),
9816 regdesc,
9817 _("The default is \"std\"."));
9818
9819 add_setshow_enum_cmd ("disassembler", no_class,
9820 valid_disassembly_styles, &disassembly_style,
9821 _("Set the disassembly style."),
9822 _("Show the disassembly style."),
9823 helptext.c_str (),
9824 set_disassembly_style_sfunc,
9825 show_disassembly_style_sfunc,
9826 &setarmcmdlist, &showarmcmdlist);
9827
9828 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9829 _("Set usage of ARM 32-bit mode."),
9830 _("Show usage of ARM 32-bit mode."),
9831 _("When off, a 26-bit PC will be used."),
9832 NULL,
9833 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9834 mode is %s. */
9835 &setarmcmdlist, &showarmcmdlist);
9836
9837 /* Add a command to allow the user to force the FPU model. */
9838 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9839 _("Set the floating point type."),
9840 _("Show the floating point type."),
9841 _("auto - Determine the FP typefrom the OS-ABI.\n\
9842 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9843 fpa - FPA co-processor (GCC compiled).\n\
9844 softvfp - Software FP with pure-endian doubles.\n\
9845 vfp - VFP co-processor."),
9846 set_fp_model_sfunc, show_fp_model,
9847 &setarmcmdlist, &showarmcmdlist);
9848
9849 /* Add a command to allow the user to force the ABI. */
9850 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9851 _("Set the ABI."),
9852 _("Show the ABI."),
9853 NULL, arm_set_abi, arm_show_abi,
9854 &setarmcmdlist, &showarmcmdlist);
9855
9856 /* Add two commands to allow the user to force the assumed
9857 execution mode. */
9858 add_setshow_enum_cmd ("fallback-mode", class_support,
9859 arm_mode_strings, &arm_fallback_mode_string,
9860 _("Set the mode assumed when symbols are unavailable."),
9861 _("Show the mode assumed when symbols are unavailable."),
9862 NULL, NULL, arm_show_fallback_mode,
9863 &setarmcmdlist, &showarmcmdlist);
9864 add_setshow_enum_cmd ("force-mode", class_support,
9865 arm_mode_strings, &arm_force_mode_string,
9866 _("Set the mode assumed even when symbols are available."),
9867 _("Show the mode assumed even when symbols are available."),
9868 NULL, NULL, arm_show_force_mode,
9869 &setarmcmdlist, &showarmcmdlist);
9870
9871 /* Debugging flag. */
9872 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9873 _("Set ARM debugging."),
9874 _("Show ARM debugging."),
9875 _("When on, arm-specific debugging is enabled."),
9876 NULL,
9877 NULL, /* FIXME: i18n: ARM debugging is %s. */
9878 &setdebuglist, &showdebuglist);
9879
9880 #if GDB_SELF_TEST
9881 selftests::register_test ("arm-record", selftests::arm_record_test);
9882 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
9883 #endif
9884
9885 }
9886
9887 /* ARM-reversible process record data structures. */
9888
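/* Instruction sizes, in bytes, for the ARM, Thumb and Thumb-2 encodings.  */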
9889 #define ARM_INSN_SIZE_BYTES 4
9890 #define THUMB_INSN_SIZE_BYTES 2
9891 #define THUMB2_INSN_SIZE_BYTES 4
9892
9893
9894 /* Position of the bit within a 32-bit ARM instruction
9895 that defines whether the instruction is a load or store. */
9896 #define INSN_S_L_BIT_NUM 20
9897
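/* Allocate REGS and copy LENGTH register numbers into it from RECORD_BUF.  */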
9898 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9899 do \
9900 { \
9901 unsigned int reg_len = LENGTH; \
9902 if (reg_len) \
9903 { \
9904 REGS = XNEWVEC (uint32_t, reg_len); \
9905 memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
9906 } \
9907 } \
9908 while (0)
9909
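/* Allocate MEMS and copy LENGTH arm_mem_r entries into it from RECORD_BUF.  */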
9910 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9911 do \
9912 { \
9913 unsigned int mem_len = LENGTH; \
9914 if (mem_len) \
9915 { \
9916 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9917 memcpy (&MEMS->len, &RECORD_BUF[0], \
9918 sizeof (struct arm_mem_r) * LENGTH); \
9919 } \
9920 } \
9921 while (0)
9922
9923 /* Checks whether the insn is already recorded or is yet to be decoded (boolean expression). */
9924 #define INSN_RECORDED(ARM_RECORD) \
9925 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9926
9927 /* ARM memory record structure. */
9928 struct arm_mem_r
9929 {
9930 uint32_t len; /* Record length. */
9931 uint32_t addr; /* Memory address. */
9932 };
9933
9934 /* ARM instruction record contains opcode of current insn
9935 and execution state (before entry to decode_insn()),
9936 contains list of to-be-modified registers and
9937 memory blocks (on return from decode_insn()). */
9938
9939 typedef struct insn_decode_record_t
9940 {
9941 struct gdbarch *gdbarch;
9942 struct regcache *regcache;
9943 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9944 uint32_t arm_insn; /* Should accommodate thumb. */
9945 uint32_t cond; /* Condition code. */
9946 uint32_t opcode; /* Insn opcode. */
9947 uint32_t decode; /* Insn decode bits. */
9948 uint32_t mem_rec_count; /* Number of memory records. */
9949 uint32_t reg_rec_count; /* Number of register records. */
9950 uint32_t *arm_regs; /* Registers to be saved for this record. */
9951 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9952 } insn_decode_record;
9953
9954
9955 /* Checks ARM SBZ and SBO mandatory fields. */
9956
9957 static int
9958 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9959 {
9960 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9961
9962 if (!len)
9963 return 1;
9964
9965 if (!sbo)
9966 ones = ~ones;
9967
9968 while (ones)
9969 {
9970 if (!(ones & sbo))
9971 {
9972 return 0;
9973 }
9974 ones = ones >> 1;
9975 }
9976 return 1;
9977 }
9978
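/* Status codes returned by the record handlers.  */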
9979 enum arm_record_result
9980 {
9981 ARM_RECORD_SUCCESS = 0,
9982 ARM_RECORD_FAILURE = 1
9983 };
9984
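/* Kind of misc store handled by arm_record_strx.  */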
9985 typedef enum
9986 {
9987 ARM_RECORD_STRH = 1,
9988 ARM_RECORD_STRD
9989 } arm_record_strx_t;
9990
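/* Encoding (ARM, Thumb or Thumb-2) of the instruction being recorded.  */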
9991 typedef enum
9992 {
9993 ARM_RECORD = 1,
9994 THUMB_RECORD,
9995 THUMB2_RECORD
9996 } record_type_t;
9997
9998
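/* Record handler for the misc store instructions STRH and STRD.  Fill
   RECORD_BUF with the registers and RECORD_BUF_MEM with the length/address
   pairs that the insn will modify; STR_TYPE selects between STRH and STRD.  */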
9999 static int
10000 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10001 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10002 {
10003
10004 struct regcache *reg_cache = arm_insn_r->regcache;
10005 ULONGEST u_regval[2]= {0};
10006
10007 uint32_t reg_src1 = 0, reg_src2 = 0;
10008 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10009
10010 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10011 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10012
10013 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10014 {
10015 /* 1) Handle misc store, immediate offset. */
10016 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10017 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10018 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10019 regcache_raw_read_unsigned (reg_cache, reg_src1,
10020 &u_regval[0]);
10021 if (ARM_PC_REGNUM == reg_src1)
10022 {
10023 /* If R15 was used as Rn, the value is the current PC + 8. */
10024 u_regval[0] = u_regval[0] + 8;
10025 }
10026 offset_8 = (immed_high << 4) | immed_low;
10027 /* Calculate target store address. */
10028 if (14 == arm_insn_r->opcode)
10029 {
10030 tgt_mem_addr = u_regval[0] + offset_8;
10031 }
10032 else
10033 {
10034 tgt_mem_addr = u_regval[0] - offset_8;
10035 }
10036 if (ARM_RECORD_STRH == str_type)
10037 {
10038 record_buf_mem[0] = 2;
10039 record_buf_mem[1] = tgt_mem_addr;
10040 arm_insn_r->mem_rec_count = 1;
10041 }
10042 else if (ARM_RECORD_STRD == str_type)
10043 {
10044 record_buf_mem[0] = 4;
10045 record_buf_mem[1] = tgt_mem_addr;
10046 record_buf_mem[2] = 4;
10047 record_buf_mem[3] = tgt_mem_addr + 4;
10048 arm_insn_r->mem_rec_count = 2;
10049 }
10050 }
10051 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10052 {
10053 /* 2) Store, register offset. */
10054 /* Get Rm. */
10055 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10056 /* Get Rn. */
10057 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10058 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10059 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10060 if (15 == reg_src2)
10061 {
10062 /* If R15 was used as Rn, the value is the current PC + 8. */
10063 u_regval[0] = u_regval[0] + 8;
10064 }
10065 /* Calculate target store address, Rn +/- Rm, register offset. */
10066 if (12 == arm_insn_r->opcode)
10067 {
10068 tgt_mem_addr = u_regval[0] + u_regval[1];
10069 }
10070 else
10071 {
10072 tgt_mem_addr = u_regval[1] - u_regval[0];
10073 }
10074 if (ARM_RECORD_STRH == str_type)
10075 {
10076 record_buf_mem[0] = 2;
10077 record_buf_mem[1] = tgt_mem_addr;
10078 arm_insn_r->mem_rec_count = 1;
10079 }
10080 else if (ARM_RECORD_STRD == str_type)
10081 {
10082 record_buf_mem[0] = 4;
10083 record_buf_mem[1] = tgt_mem_addr;
10084 record_buf_mem[2] = 4;
10085 record_buf_mem[3] = tgt_mem_addr + 4;
10086 arm_insn_r->mem_rec_count = 2;
10087 }
10088 }
10089 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10090 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10091 {
10092 /* 3) Store, immediate pre-indexed. */
10093 /* 5) Store, immediate post-indexed. */
10094 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10095 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10096 offset_8 = (immed_high << 4) | immed_low;
10097 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10098 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10099 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
10100 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10101 {
10102 tgt_mem_addr = u_regval[0] + offset_8;
10103 }
10104 else
10105 {
10106 tgt_mem_addr = u_regval[0] - offset_8;
10107 }
10108 if (ARM_RECORD_STRH == str_type)
10109 {
10110 record_buf_mem[0] = 2;
10111 record_buf_mem[1] = tgt_mem_addr;
10112 arm_insn_r->mem_rec_count = 1;
10113 }
10114 else if (ARM_RECORD_STRD == str_type)
10115 {
10116 record_buf_mem[0] = 4;
10117 record_buf_mem[1] = tgt_mem_addr;
10118 record_buf_mem[2] = 4;
10119 record_buf_mem[3] = tgt_mem_addr + 4;
10120 arm_insn_r->mem_rec_count = 2;
10121 }
10122 /* Record Rn also as it changes. */
10123 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10124 arm_insn_r->reg_rec_count = 1;
10125 }
10126 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10127 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10128 {
10129 /* 4) Store, register pre-indexed. */
10130 /* 6) Store, register post-indexed. */
10131 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10132 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10133 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10134 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10135 /* Calculate target store address, Rn +/- Rm, register offset. */
10136 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10137 {
10138 tgt_mem_addr = u_regval[0] + u_regval[1];
10139 }
10140 else
10141 {
10142 tgt_mem_addr = u_regval[1] - u_regval[0];
10143 }
10144 if (ARM_RECORD_STRH == str_type)
10145 {
10146 record_buf_mem[0] = 2;
10147 record_buf_mem[1] = tgt_mem_addr;
10148 arm_insn_r->mem_rec_count = 1;
10149 }
10150 else if (ARM_RECORD_STRD == str_type)
10151 {
10152 record_buf_mem[0] = 4;
10153 record_buf_mem[1] = tgt_mem_addr;
10154 record_buf_mem[2] = 4;
10155 record_buf_mem[3] = tgt_mem_addr + 4;
10156 arm_insn_r->mem_rec_count = 2;
10157 }
10158 /* Record Rn also as it changes. */
10159 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10160 arm_insn_r->reg_rec_count = 1;
10161 }
10162 return 0;
10163 }
10164
10165 /* Handling ARM extension space insns. */
10166
10167 static int
10168 arm_record_extension_space (insn_decode_record *arm_insn_r)
10169 {
10170 int ret = 0; /* Return value: -1: record failure; 0: success. */
10171 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10172 uint32_t record_buf[8], record_buf_mem[8];
10173 uint32_t reg_src1 = 0;
10174 struct regcache *reg_cache = arm_insn_r->regcache;
10175 ULONGEST u_regval = 0;
10176
10177 gdb_assert (!INSN_RECORDED(arm_insn_r));
10178 /* Handle unconditional insn extension space. */
10179
10180 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10181 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10182 if (arm_insn_r->cond)
10183 {
10184 /* PLD has no effect on the architectural state, it just affects
10185 the caches. */
10186 if (5 == ((opcode1 & 0xE0) >> 5))
10187 {
10188 /* BLX(1) */
10189 record_buf[0] = ARM_PS_REGNUM;
10190 record_buf[1] = ARM_LR_REGNUM;
10191 arm_insn_r->reg_rec_count = 2;
10192 }
10193 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10194 }
10195
10196
10197 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10198 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10199 {
10200 ret = -1;
10201 /* Undefined instruction on ARM V5; need to handle if later
10202 versions define it. */
10203 }
10204
10205 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10206 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10207 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10208
10209 /* Handle arithmetic insn extension space. */
10210 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10211 && !INSN_RECORDED(arm_insn_r))
10212 {
10213 /* Handle MLA(S) and MUL(S). */
10214 if (in_inclusive_range (insn_op1, 0U, 3U))
10215 {
10216 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10217 record_buf[1] = ARM_PS_REGNUM;
10218 arm_insn_r->reg_rec_count = 2;
10219 }
10220 else if (in_inclusive_range (insn_op1, 4U, 15U))
10221 {
10222 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10223 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10224 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10225 record_buf[2] = ARM_PS_REGNUM;
10226 arm_insn_r->reg_rec_count = 3;
10227 }
10228 }
10229
10230 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10231 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10232 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10233
10234 /* Handle control insn extension space. */
10235
10236 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10237 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10238 {
10239 if (!bit (arm_insn_r->arm_insn,25))
10240 {
10241 if (!bits (arm_insn_r->arm_insn, 4, 7))
10242 {
10243 if ((0 == insn_op1) || (2 == insn_op1))
10244 {
10245 /* MRS. */
10246 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10247 arm_insn_r->reg_rec_count = 1;
10248 }
10249 else if (1 == insn_op1)
10250 {
10251 /* CPSR is going to be changed. */
10252 record_buf[0] = ARM_PS_REGNUM;
10253 arm_insn_r->reg_rec_count = 1;
10254 }
10255 else if (3 == insn_op1)
10256 {
10257 /* SPSR is going to be changed. */
10258 /* We need to get SPSR value, which is yet to be done. */
10259 return -1;
10260 }
10261 }
10262 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10263 {
10264 if (1 == insn_op1)
10265 {
10266 /* BX. */
10267 record_buf[0] = ARM_PS_REGNUM;
10268 arm_insn_r->reg_rec_count = 1;
10269 }
10270 else if (3 == insn_op1)
10271 {
10272 /* CLZ. */
10273 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10274 arm_insn_r->reg_rec_count = 1;
10275 }
10276 }
10277 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10278 {
10279 /* BLX. */
10280 record_buf[0] = ARM_PS_REGNUM;
10281 record_buf[1] = ARM_LR_REGNUM;
10282 arm_insn_r->reg_rec_count = 2;
10283 }
10284 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10285 {
10286 /* QADD, QSUB, QDADD, QDSUB */
10287 record_buf[0] = ARM_PS_REGNUM;
10288 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10289 arm_insn_r->reg_rec_count = 2;
10290 }
10291 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10292 {
10293 /* BKPT. */
10294 record_buf[0] = ARM_PS_REGNUM;
10295 record_buf[1] = ARM_LR_REGNUM;
10296 arm_insn_r->reg_rec_count = 2;
10297
10298 /* Save SPSR also; how? */
10299 return -1;
10300 }
10301 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10302 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10303 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10304 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10305 )
10306 {
10307 if (0 == insn_op1 || 1 == insn_op1)
10308 {
10309 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10310 /* We don't do the optimization for SMULW<y>, where we
10311 need only Rd. */
10312 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10313 record_buf[1] = ARM_PS_REGNUM;
10314 arm_insn_r->reg_rec_count = 2;
10315 }
10316 else if (2 == insn_op1)
10317 {
10318 /* SMLAL<x><y>. */
10319 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10320 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10321 arm_insn_r->reg_rec_count = 2;
10322 }
10323 else if (3 == insn_op1)
10324 {
10325 /* SMUL<x><y>. */
10326 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10327 arm_insn_r->reg_rec_count = 1;
10328 }
10329 }
10330 }
10331 else
10332 {
10333 /* MSR : immediate form. */
10334 if (1 == insn_op1)
10335 {
10336 /* CPSR is going to be changed. */
10337 record_buf[0] = ARM_PS_REGNUM;
10338 arm_insn_r->reg_rec_count = 1;
10339 }
10340 else if (3 == insn_op1)
10341 {
10342 /* SPSR is going to be changed. */
10343 /* We need to get the SPSR value, which is yet to be done. */
10344 return -1;
10345 }
10346 }
10347 }
10348
10349 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10350 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10351 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10352
10353 /* Handle load/store insn extension space. */
10354
10355 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10356 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10357 && !INSN_RECORDED(arm_insn_r))
10358 {
10359 /* SWP/SWPB. */
10360 if (0 == insn_op1)
10361 {
10362 /* These insns change both a register and memory. */
10363 /* SWP or SWPB insn. */
10364 /* Get memory address given by Rn. */
10365 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10366 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10367 /* SWP insn?  It swaps a word. */
10368 if (8 == arm_insn_r->opcode)
10369 {
10370 record_buf_mem[0] = 4;
10371 }
10372 else
10373 {
10374 /* SWPB insn, swaps only byte. */
10375 record_buf_mem[0] = 1;
10376 }
10377 record_buf_mem[1] = u_regval;
10378 arm_insn_r->mem_rec_count = 1;
10379 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10380 arm_insn_r->reg_rec_count = 1;
10381 }
10382 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10383 {
10384 /* STRH. */
10385 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10386 ARM_RECORD_STRH);
10387 }
10388 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10389 {
10390 /* LDRD. */
10391 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10392 record_buf[1] = record_buf[0] + 1;
10393 arm_insn_r->reg_rec_count = 2;
10394 }
10395 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10396 {
10397 /* STRD. */
10398 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10399 ARM_RECORD_STRD);
10400 }
10401 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10402 {
10403 /* LDRH, LDRSB, LDRSH. */
10404 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10405 arm_insn_r->reg_rec_count = 1;
10406 }
10407
10408 }
10409
10410 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10411 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10412 && !INSN_RECORDED(arm_insn_r))
10413 {
10414 ret = -1;
10415 /* Handle coprocessor insn extension space. */
10416 }
10417
10418 /* To be done for ARMv5 and later; as of now we return -1. */
10419 if (-1 == ret)
10420 return ret;
10421
10422 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10423 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10424
10425 return ret;
10426 }
10427
10428 /* Handling opcode 000 insns. */
10429
10430 static int
10431 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10432 {
10433 struct regcache *reg_cache = arm_insn_r->regcache;
10434 uint32_t record_buf[8], record_buf_mem[8];
10435 ULONGEST u_regval[2] = {0};
10436
10437 uint32_t reg_src1 = 0;
10438 uint32_t opcode1 = 0;
10439
10440 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10441 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10442 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10443
10444 if (!((opcode1 & 0x19) == 0x10))
10445 {
10446 /* Data-processing (register) and Data-processing (register-shifted
10447 register). */
10448 /* In all of the 11 shifter-operand modes, the insn modifies the
10449 destination register, which is specified by bits 12-15. */
10450 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10451 record_buf[1] = ARM_PS_REGNUM;
10452 arm_insn_r->reg_rec_count = 2;
10453 }
10454 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10455 {
10456 /* Miscellaneous instructions */
10457
10458 if (3 == arm_insn_r->decode && 0x12 == opcode1
10459 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10460 {
10461 /* Handle BLX, branch and link/exchange. */
10462 if (9 == arm_insn_r->opcode)
10463 {
10464 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm,
10465 and R14 stores the return address. */
10466 record_buf[0] = ARM_PS_REGNUM;
10467 record_buf[1] = ARM_LR_REGNUM;
10468 arm_insn_r->reg_rec_count = 2;
10469 }
10470 }
10471 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10472 {
10473 /* Handle enhanced software breakpoint insn, BKPT. */
10474 /* CPSR is changed to be executed in ARM state, disabling normal
10475 interrupts, entering abort mode. */
10476 /* According to high vector configuration PC is set. */
10477 /* If the user hit the breakpoint and is running in reverse, then
10478 we need to go back with the previous CPSR and
10479 Program Counter. */
10480 record_buf[0] = ARM_PS_REGNUM;
10481 record_buf[1] = ARM_LR_REGNUM;
10482 arm_insn_r->reg_rec_count = 2;
10483
10484 /* Save SPSR also; how? */
10485 return -1;
10486 }
10487 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10488 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10489 {
10490 /* Handle BX, branch and exchange. */
10491 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10492 record_buf[0] = ARM_PS_REGNUM;
10493 arm_insn_r->reg_rec_count = 1;
10494 }
10495 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10496 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10497 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10498 {
10499 /* Count leading zeros: CLZ. */
10500 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10501 arm_insn_r->reg_rec_count = 1;
10502 }
10503 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10504 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10505 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10506 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10507 {
10508 /* Handle MRS insn. */
10509 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10510 arm_insn_r->reg_rec_count = 1;
10511 }
10512 }
10513 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10514 {
10515 /* Multiply and multiply-accumulate */
10516
10517 /* Handle multiply instructions. */
10518 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10519 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10520 {
10521 /* Handle MLA and MUL. */
10522 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10523 record_buf[1] = ARM_PS_REGNUM;
10524 arm_insn_r->reg_rec_count = 2;
10525 }
10526 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10527 {
10528 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10529 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10530 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10531 record_buf[2] = ARM_PS_REGNUM;
10532 arm_insn_r->reg_rec_count = 3;
10533 }
10534 }
10535 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10536 {
10537 /* Synchronization primitives */
10538
10539 /* Handling SWP, SWPB. */
10540 /* These insns change both a register and memory. */
10541 /* SWP or SWPB insn. */
10542
10543 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10544 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10545 /* SWP insn?  It swaps a word. */
10546 if (8 == arm_insn_r->opcode)
10547 {
10548 record_buf_mem[0] = 4;
10549 }
10550 else
10551 {
10552 /* SWPB insn, swaps only byte. */
10553 record_buf_mem[0] = 1;
10554 }
10555 record_buf_mem[1] = u_regval[0];
10556 arm_insn_r->mem_rec_count = 1;
10557 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10558 arm_insn_r->reg_rec_count = 1;
10559 }
10560 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10561 || 15 == arm_insn_r->decode)
10562 {
10563 if ((opcode1 & 0x12) == 2)
10564 {
10565 /* Extra load/store (unprivileged) */
10566 return -1;
10567 }
10568 else
10569 {
10570 /* Extra load/store */
10571 switch (bits (arm_insn_r->arm_insn, 5, 6))
10572 {
10573 case 1:
10574 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10575 {
10576 /* STRH (register), STRH (immediate) */
10577 arm_record_strx (arm_insn_r, &record_buf[0],
10578 &record_buf_mem[0], ARM_RECORD_STRH);
10579 }
10580 else if ((opcode1 & 0x05) == 0x1)
10581 {
10582 /* LDRH (register) */
10583 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10584 arm_insn_r->reg_rec_count = 1;
10585
10586 if (bit (arm_insn_r->arm_insn, 21))
10587 {
10588 /* Write back to Rn. */
10589 record_buf[arm_insn_r->reg_rec_count++]
10590 = bits (arm_insn_r->arm_insn, 16, 19);
10591 }
10592 }
10593 else if ((opcode1 & 0x05) == 0x5)
10594 {
10595 /* LDRH (immediate), LDRH (literal) */
10596 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10597
10598 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10599 arm_insn_r->reg_rec_count = 1;
10600
10601 if (rn != 15)
10602 {
10603 /* LDRH (immediate). */
10604 if (bit (arm_insn_r->arm_insn, 21))
10605 {
10606 /* Write back to Rn. */
10607 record_buf[arm_insn_r->reg_rec_count++] = rn;
10608 }
10609 }
10610 }
10611 else
10612 return -1;
10613 break;
10614 case 2:
10615 if ((opcode1 & 0x05) == 0x0)
10616 {
10617 /* LDRD (register) */
10618 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10619 record_buf[1] = record_buf[0] + 1;
10620 arm_insn_r->reg_rec_count = 2;
10621
10622 if (bit (arm_insn_r->arm_insn, 21))
10623 {
10624 /* Write back to Rn. */
10625 record_buf[arm_insn_r->reg_rec_count++]
10626 = bits (arm_insn_r->arm_insn, 16, 19);
10627 }
10628 }
10629 else if ((opcode1 & 0x05) == 0x1)
10630 {
10631 /* LDRSB (register) */
10632 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10633 arm_insn_r->reg_rec_count = 1;
10634
10635 if (bit (arm_insn_r->arm_insn, 21))
10636 {
10637 /* Write back to Rn. */
10638 record_buf[arm_insn_r->reg_rec_count++]
10639 = bits (arm_insn_r->arm_insn, 16, 19);
10640 }
10641 }
10642 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10643 {
10644 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10645 LDRSB (literal) */
10646 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10647
10648 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10649 arm_insn_r->reg_rec_count = 1;
10650
10651 if (rn != 15)
10652 {
10653 /* LDRD (immediate), LDRSB (immediate). */
10654 if (bit (arm_insn_r->arm_insn, 21))
10655 {
10656 /* Write back to Rn. */
10657 record_buf[arm_insn_r->reg_rec_count++] = rn;
10658 }
10659 }
10660 }
10661 else
10662 return -1;
10663 break;
10664 case 3:
10665 if ((opcode1 & 0x05) == 0x0)
10666 {
10667 /* STRD (register) */
10668 arm_record_strx (arm_insn_r, &record_buf[0],
10669 &record_buf_mem[0], ARM_RECORD_STRD);
10670 }
10671 else if ((opcode1 & 0x05) == 0x1)
10672 {
10673 /* LDRSH (register) */
10674 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10675 arm_insn_r->reg_rec_count = 1;
10676
10677 if (bit (arm_insn_r->arm_insn, 21))
10678 {
10679 /* Write back to Rn. */
10680 record_buf[arm_insn_r->reg_rec_count++]
10681 = bits (arm_insn_r->arm_insn, 16, 19);
10682 }
10683 }
10684 else if ((opcode1 & 0x05) == 0x4)
10685 {
10686 /* STRD (immediate) */
10687 arm_record_strx (arm_insn_r, &record_buf[0],
10688 &record_buf_mem[0], ARM_RECORD_STRD);
10689 }
10690 else if ((opcode1 & 0x05) == 0x5)
10691 {
10692 /* LDRSH (immediate), LDRSH (literal) */
10693 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10694 arm_insn_r->reg_rec_count = 1;
10695
10696 if (bit (arm_insn_r->arm_insn, 21))
10697 {
10698 /* Write back to Rn. */
10699 record_buf[arm_insn_r->reg_rec_count++]
10700 = bits (arm_insn_r->arm_insn, 16, 19);
10701 }
10702 }
10703 else
10704 return -1;
10705 break;
10706 default:
10707 return -1;
10708 }
10709 }
10710 }
10711 else
10712 {
10713 return -1;
10714 }
10715
10716 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10717 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10718 return 0;
10719 }
10720
10721 /* Handling opcode 001 insns. */
10722
10723 static int
10724 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10725 {
10726 uint32_t record_buf[8], record_buf_mem[8];
10727
10728 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10729 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10730
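  /* Bits 21-24 form the data-processing opcode; with bits 20-21 == 2 and the
     SBO/SBZ fields in place, opcode 9 is MSR (immediate) writing CPSR and
     opcode 11 is MSR (immediate) writing SPSR.  */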
10731 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10732 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10733 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10734 )
10735 {
10736 /* Handle MSR insn. */
10737 if (9 == arm_insn_r->opcode)
10738 {
10739 /* CPSR is going to be changed. */
10740 record_buf[0] = ARM_PS_REGNUM;
10741 arm_insn_r->reg_rec_count = 1;
10742 }
10743 else
10744 {
10745 /* SPSR is going to be changed. */
10746 }
10747 }
10748 else if (arm_insn_r->opcode <= 15)
10749 {
10750 /* Normal data processing insns. */
10751 /* In every shifter-operand form the instruction modifies the destination
10752 register, which is encoded in bits 12-15, as well as the CPSR flags. */
10753 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10754 record_buf[1] = ARM_PS_REGNUM;
10755 arm_insn_r->reg_rec_count = 2;
10756 }
10757 else
10758 {
10759 return -1;
10760 }
10761
10762 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10763 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10764 return 0;
10765 }
10766
10767 static int
10768 arm_record_media (insn_decode_record *arm_insn_r)
10769 {
10770 uint32_t record_buf[8];
10771
10772 switch (bits (arm_insn_r->arm_insn, 22, 24))
10773 {
10774 case 0:
10775 /* Parallel addition and subtraction, signed */
10776 case 1:
10777 /* Parallel addition and subtraction, unsigned */
10778 case 2:
10779 case 3:
10780 /* Packing, unpacking, saturation and reversal */
10781 {
10782 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10783
10784 record_buf[arm_insn_r->reg_rec_count++] = rd;
10785 }
10786 break;
10787
10788 case 4:
10789 case 5:
10790 /* Signed multiplies */
10791 {
10792 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10793 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10794
10795 record_buf[arm_insn_r->reg_rec_count++] = rd;
10796 if (op1 == 0x0)
10797 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10798 else if (op1 == 0x4)
10799 record_buf[arm_insn_r->reg_rec_count++]
10800 = bits (arm_insn_r->arm_insn, 12, 15);
10801 }
10802 break;
10803
10804 case 6:
10805 {
10806 if (bit (arm_insn_r->arm_insn, 21)
10807 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10808 {
10809 /* SBFX */
10810 record_buf[arm_insn_r->reg_rec_count++]
10811 = bits (arm_insn_r->arm_insn, 12, 15);
10812 }
10813 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10814 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10815 {
10816 /* USAD8 and USADA8 */
10817 record_buf[arm_insn_r->reg_rec_count++]
10818 = bits (arm_insn_r->arm_insn, 16, 19);
10819 }
10820 }
10821 break;
10822
10823 case 7:
10824 {
10825 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10826 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10827 {
10828 /* Permanently UNDEFINED */
10829 return -1;
10830 }
10831 else
10832 {
10833 /* BFC, BFI and UBFX */
10834 record_buf[arm_insn_r->reg_rec_count++]
10835 = bits (arm_insn_r->arm_insn, 12, 15);
10836 }
10837 }
10838 break;
10839
10840 default:
10841 return -1;
10842 }
10843
10844 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10845
10846 return 0;
10847 }
10848
10849 /* Handle ARM mode instructions with opcode 010. */
10850
10851 static int
10852 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10853 {
10854 struct regcache *reg_cache = arm_insn_r->regcache;
10855
10856 uint32_t reg_base , reg_dest;
10857 uint32_t offset_12, tgt_mem_addr;
10858 uint32_t record_buf[8], record_buf_mem[8];
10859 unsigned char wback;
10860 ULONGEST u_regval;
10861
10862 /* Calculate wback. */
10863 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10864 || (bit (arm_insn_r->arm_insn, 21) == 1);
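  /* In this encoding bit 24 is P (pre/post indexing) and bit 21 is W: the
     base register is written back either in post-indexed mode (P == 0) or
     when W is set in pre-indexed mode.  */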
10865
10866 arm_insn_r->reg_rec_count = 0;
10867 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10868
10869 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10870 {
10871 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10872 and LDRT. */
10873
10874 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10875 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10876
10877 /* The LDR instruction is capable of branching. If MOV LR, PC
10878 precedes an LDR instruction whose destination is R15 (the PC), the
10879 pair emulates a branch-and-link instruction, and hence we need to save
10880 CPSR and PC as well. */
10881 if (ARM_PC_REGNUM == reg_dest)
10882 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10883
10884 /* If wback is true, also save the base register, which is going to be
10885 written to. */
10886 if (wback)
10887 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10888 }
10889 else
10890 {
10891 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10892
10893 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10894 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10895
10896 /* Handle bit U. */
10897 if (bit (arm_insn_r->arm_insn, 23))
10898 {
10899 /* U == 1: Add the offset. */
10900 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10901 }
10902 else
10903 {
10904 /* U == 0: subtract the offset. */
10905 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10906 }
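      /* Illustrative example only: STR r1, [r2, #-8] with r2 == 0x1000 has
	 U == 0, so tgt_mem_addr == 0x1000 - 8 == 0xff8.  */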
10907
10908 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10909 bytes. */
10910 if (bit (arm_insn_r->arm_insn, 22))
10911 {
10912 /* STRB and STRBT: 1 byte. */
10913 record_buf_mem[0] = 1;
10914 }
10915 else
10916 {
10917 /* STR and STRT: 4 bytes. */
10918 record_buf_mem[0] = 4;
10919 }
10920
10921 /* Handle bit P. */
10922 if (bit (arm_insn_r->arm_insn, 24))
10923 record_buf_mem[1] = tgt_mem_addr;
10924 else
10925 record_buf_mem[1] = (uint32_t) u_regval;
10926
10927 arm_insn_r->mem_rec_count = 1;
10928
10929 /* If wback is true, also save the base register, which is going to be
10930 written to. */
10931 if (wback)
10932 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10933 }
10934
10935 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10936 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10937 return 0;
10938 }
10939
10940 /* Handling opcode 011 insns. */
10941
10942 static int
10943 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10944 {
10945 struct regcache *reg_cache = arm_insn_r->regcache;
10946
10947 uint32_t shift_imm = 0;
10948 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10949 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10950 uint32_t record_buf[8], record_buf_mem[8];
10951
10952 LONGEST s_word;
10953 ULONGEST u_regval[2];
10954
10955 if (bit (arm_insn_r->arm_insn, 4))
10956 return arm_record_media (arm_insn_r);
10957
10958 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10959 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10960
10961 /* Handle the enhanced store insns and the LDRD DSP insn; the cases are
10962 ordered according to the addressing modes of the store insns, starting
10963 with the STRH insn. */
10964
10965 /* LDR or STR? */
10966 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10967 {
10968 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10969 /* An LDR insn is capable of branching: if MOV LR, PC precedes an
10970 LDR insn whose destination is R15 (the PC), the pair emulates a
10971 branch-and-link insn, and hence we need to save CPSR and PC as
10972 well. */
10973 if (15 != reg_dest)
10974 {
10975 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10976 arm_insn_r->reg_rec_count = 1;
10977 }
10978 else
10979 {
10980 record_buf[0] = reg_dest;
10981 record_buf[1] = ARM_PS_REGNUM;
10982 arm_insn_r->reg_rec_count = 2;
10983 }
10984 }
10985 else
10986 {
10987 if (! bits (arm_insn_r->arm_insn, 4, 11))
10988 {
10989 /* Store insn, register offset and register pre-indexed,
10990 register post-indexed. */
10991 /* Get Rm. */
10992 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10993 /* Get Rn. */
10994 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10995 regcache_raw_read_unsigned (reg_cache, reg_src1
10996 , &u_regval[0]);
10997 regcache_raw_read_unsigned (reg_cache, reg_src2
10998 , &u_regval[1]);
10999 if (15 == reg_src2)
11000 {
11001 /* If R15 is used as Rn, its value is the address of the current insn plus 8. */
11002 /* Pre-indexed mode doesn't reach here; that encoding is illegal. */
11003 u_regval[0] = u_regval[0] + 8;
11004 }
11005 /* Calculate target store address, Rn +/- Rm, register offset. */
11006 /* U == 1. */
11007 if (bit (arm_insn_r->arm_insn, 23))
11008 {
11009 tgt_mem_addr = u_regval[0] + u_regval[1];
11010 }
11011 else
11012 {
11013 tgt_mem_addr = u_regval[1] - u_regval[0];
11014 }
11015
11016 switch (arm_insn_r->opcode)
11017 {
11018 /* STR. */
11019 case 8:
11020 case 12:
11021 /* STR. */
11022 case 9:
11023 case 13:
11024 /* STRT. */
11025 case 1:
11026 case 5:
11027 /* STR. */
11028 case 0:
11029 case 4:
11030 record_buf_mem[0] = 4;
11031 break;
11032
11033 /* STRB. */
11034 case 10:
11035 case 14:
11036 /* STRB. */
11037 case 11:
11038 case 15:
11039 /* STRBT. */
11040 case 3:
11041 case 7:
11042 /* STRB. */
11043 case 2:
11044 case 6:
11045 record_buf_mem[0] = 1;
11046 break;
11047
11048 default:
11049 gdb_assert_not_reached ("no decoding pattern found");
11050 break;
11051 }
11052 record_buf_mem[1] = tgt_mem_addr;
11053 arm_insn_r->mem_rec_count = 1;
11054
11055 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11056 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11057 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11058 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11059 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11060 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11061 )
11062 {
11063 /* Rn is going to be changed in pre-indexed mode and
11064 post-indexed mode as well. */
11065 record_buf[0] = reg_src2;
11066 arm_insn_r->reg_rec_count = 1;
11067 }
11068 }
11069 else
11070 {
11071 /* Store insn, scaled register offset; scaled pre-indexed. */
11072 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11073 /* Get Rm. */
11074 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11075 /* Get Rn. */
11076 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11077 /* Get shift_imm. */
11078 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11079 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11080 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11081 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11082 /* OFFSET_12 currently holds the shift type (insn bits 5-6). */
11083 switch (offset_12)
11084 {
11085 case 0:
11086 /* LSL: logical shift left by SHIFT_IMM. */
11087 offset_12 = u_regval[0] << shift_imm;
11088 break;
11089
11090 case 1:
11091 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;  /* LSR.  */
11092 break;
11093
11094 case 2:
11095 if (!shift_imm)
11096 {
11097 if (bit (u_regval[0], 31))
11098 {
11099 offset_12 = 0xFFFFFFFF;
11100 }
11101 else
11102 {
11103 offset_12 = 0;
11104 }
11105 }
11106 else
11107 {
11108 /* ASR: arithmetic shift right. */
11109 offset_12 = s_word >> shift_imm;
11110 }
11111 break;
11112
11113 case 3:
11114 if (!shift_imm)
11115 {
11116 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11117 &u_regval[1]);
11118 /* RRX: the C flag (CPSR bit 29) becomes bit 31, combined with Rm >> 1. */
11119 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11120 | (u_regval[0]) >> 1);
11121 }
11122 else
11123 {
11124 /* ROR: rotate right by SHIFT_IMM bits. */
11125 offset_12 = (u_regval[0] >> shift_imm)
11126 | (u_regval[0] << (32 - shift_imm));
11127 }
11128 break;
11129
11130 default:
11131 gdb_assert_not_reached ("no decoding pattern found");
11132 break;
11133 }
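	  /* At this point OFFSET_12 holds the shifted/rotated value of Rm.
	     Illustrative example: an ASR (case 2) with shift_imm == 4 and
	     Rm == 0xffffff00 yields OFFSET_12 == 0xfffffff0.  */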
11134
11135 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11136 /* bit U set. */
11137 if (bit (arm_insn_r->arm_insn, 23))
11138 {
11139 tgt_mem_addr = u_regval[1] + offset_12;
11140 }
11141 else
11142 {
11143 tgt_mem_addr = u_regval[1] - offset_12;
11144 }
11145
11146 switch (arm_insn_r->opcode)
11147 {
11148 /* STR. */
11149 case 8:
11150 case 12:
11151 /* STR. */
11152 case 9:
11153 case 13:
11154 /* STRT. */
11155 case 1:
11156 case 5:
11157 /* STR. */
11158 case 0:
11159 case 4:
11160 record_buf_mem[0] = 4;
11161 break;
11162
11163 /* STRB. */
11164 case 10:
11165 case 14:
11166 /* STRB. */
11167 case 11:
11168 case 15:
11169 /* STRBT. */
11170 case 3:
11171 case 7:
11172 /* STRB. */
11173 case 2:
11174 case 6:
11175 record_buf_mem[0] = 1;
11176 break;
11177
11178 default:
11179 gdb_assert_not_reached ("no decoding pattern found");
11180 break;
11181 }
11182 record_buf_mem[1] = tgt_mem_addr;
11183 arm_insn_r->mem_rec_count = 1;
11184
11185 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11186 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11187 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11188 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11189 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11190 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11191 )
11192 {
11193 /* Rn is going to be changed in scaled register pre-indexed mode
11194 and scaled post-indexed mode. */
11195 record_buf[0] = reg_src2;
11196 arm_insn_r->reg_rec_count = 1;
11197 }
11198 }
11199 }
11200
11201 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11202 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11203 return 0;
11204 }
11205
11206 /* Handle ARM mode instructions with opcode 100. */
11207
11208 static int
11209 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11210 {
11211 struct regcache *reg_cache = arm_insn_r->regcache;
11212 uint32_t register_count = 0, register_bits;
11213 uint32_t reg_base, addr_mode;
11214 uint32_t record_buf[24], record_buf_mem[48];
11215 uint32_t wback;
11216 ULONGEST u_regval;
11217
11218 /* Fetch the list of registers. */
11219 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11220 arm_insn_r->reg_rec_count = 0;
11221
11222 /* Fetch the base register that contains the address we are loading data
11223 to. */
11224 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11225
11226 /* Calculate wback. */
11227 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11228
11229 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11230 {
11231 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11232
11233 /* Find out which registers are going to be loaded from memory. */
11234 while (register_bits)
11235 {
11236 if (register_bits & 0x00000001)
11237 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11238 register_bits = register_bits >> 1;
11239 register_count++;
11240 }
11241
11242
11243 /* If wback is true, also save the base register, which is going to be
11244 written to. */
11245 if (wback)
11246 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11247
11248 /* Save the CPSR register. */
11249 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11250 }
11251 else
11252 {
11253 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11254
11255 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11256
11257 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11258
11259 /* Find out how many registers are going to be stored to memory. */
11260 while (register_bits)
11261 {
11262 if (register_bits & 0x00000001)
11263 register_count++;
11264 register_bits = register_bits >> 1;
11265 }
11266
11267 switch (addr_mode)
11268 {
11269 /* STMDA (STMED): Decrement after. */
11270 case 0:
11271 record_buf_mem[1] = (uint32_t) u_regval
11272 - register_count * ARM_INT_REGISTER_SIZE + 4;
11273 break;
11274 /* STM (STMIA, STMEA): Increment after. */
11275 case 1:
11276 record_buf_mem[1] = (uint32_t) u_regval;
11277 break;
11278 /* STMDB (STMFD): Decrement before. */
11279 case 2:
11280 record_buf_mem[1] = (uint32_t) u_regval
11281 - register_count * ARM_INT_REGISTER_SIZE;
11282 break;
11283 /* STMIB (STMFA): Increment before. */
11284 case 3:
11285 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11286 break;
11287 default:
11288 gdb_assert_not_reached ("no decoding pattern found");
11289 break;
11290 }
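      /* Illustrative example: STMDB r13!, {r4-r7, lr} (addr_mode == 2) stores
	 five registers, so with r13 == 0x2000 the lowest address recorded is
	 0x2000 - 5 * 4 == 0x1fec.  */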
11291
11292 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11293 arm_insn_r->mem_rec_count = 1;
11294
11295 /* If wback is true, also save the base register, which is going to be
11296 written to. */
11297 if (wback)
11298 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11299 }
11300
11301 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11302 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11303 return 0;
11304 }
11305
11306 /* Handling opcode 101 insns. */
11307
11308 static int
11309 arm_record_b_bl (insn_decode_record *arm_insn_r)
11310 {
11311 uint32_t record_buf[8];
11312
11313 /* Handle B, BL, BLX(1) insns. */
11314 /* B simply branches so we do nothing here. */
11315 /* Note: BLX(1) doesn't fall here; it falls into the extension
11316 space instead. */
11317 if (bit (arm_insn_r->arm_insn, 24))
11318 {
11319 record_buf[0] = ARM_LR_REGNUM;
11320 arm_insn_r->reg_rec_count = 1;
11321 }
11322
11323 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11324
11325 return 0;
11326 }
11327
11328 static int
11329 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11330 {
11331 printf_unfiltered (_("Process record does not support instruction "
11332 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11333 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11334
11335 return -1;
11336 }
11337
11338 /* Record handler for vector data transfer instructions. */
11339
11340 static int
11341 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11342 {
11343 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11344 uint32_t record_buf[4];
11345
11346 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11347 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11348 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11349 bit_l = bit (arm_insn_r->arm_insn, 20);
11350 bit_c = bit (arm_insn_r->arm_insn, 8);
11351
11352 /* Handle VMOV instruction. */
11353 if (bit_l && bit_c)
11354 {
11355 record_buf[0] = reg_t;
11356 arm_insn_r->reg_rec_count = 1;
11357 }
11358 else if (bit_l && !bit_c)
11359 {
11360 /* Handle VMOV instruction. */
11361 if (bits_a == 0x00)
11362 {
11363 record_buf[0] = reg_t;
11364 arm_insn_r->reg_rec_count = 1;
11365 }
11366 /* Handle VMRS instruction. */
11367 else if (bits_a == 0x07)
11368 {
11369 if (reg_t == 15)
11370 reg_t = ARM_PS_REGNUM;
11371
11372 record_buf[0] = reg_t;
11373 arm_insn_r->reg_rec_count = 1;
11374 }
11375 }
11376 else if (!bit_l && !bit_c)
11377 {
11378 /* Handle VMOV instruction. */
11379 if (bits_a == 0x00)
11380 {
11381 record_buf[0] = ARM_D0_REGNUM + reg_v;
11382
11383 arm_insn_r->reg_rec_count = 1;
11384 }
11385 /* Handle VMSR instruction. */
11386 else if (bits_a == 0x07)
11387 {
11388 record_buf[0] = ARM_FPSCR_REGNUM;
11389 arm_insn_r->reg_rec_count = 1;
11390 }
11391 }
11392 else if (!bit_l && bit_c)
11393 {
11394 /* Handle VMOV instruction. */
11395 if (!(bits_a & 0x04))
11396 {
11397 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11398 + ARM_D0_REGNUM;
11399 arm_insn_r->reg_rec_count = 1;
11400 }
11401 /* Handle VDUP instruction. */
11402 else
11403 {
11404 if (bit (arm_insn_r->arm_insn, 21))
11405 {
11406 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11407 record_buf[0] = reg_v + ARM_D0_REGNUM;
11408 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11409 arm_insn_r->reg_rec_count = 2;
11410 }
11411 else
11412 {
11413 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11414 record_buf[0] = reg_v + ARM_D0_REGNUM;
11415 arm_insn_r->reg_rec_count = 1;
11416 }
11417 }
11418 }
11419
11420 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11421 return 0;
11422 }
11423
11424 /* Record handler for extension register load/store instructions. */
11425
11426 static int
11427 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11428 {
11429 uint32_t opcode, single_reg;
11430 uint8_t op_vldm_vstm;
11431 uint32_t record_buf[8], record_buf_mem[128];
11432 ULONGEST u_regval = 0;
11433
11434 struct regcache *reg_cache = arm_insn_r->regcache;
11435
11436 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11437 single_reg = !bit (arm_insn_r->arm_insn, 8);
11438 op_vldm_vstm = opcode & 0x1b;
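  /* OPCODE is bits 20-24, i.e. P U D W L.  Masking with 0x1b drops the D
     bit, so OP_VLDM_VSTM distinguishes the VSTM/VPUSH/VLDM/VPOP forms
     regardless of which half of the register bank is addressed.  */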
11439
11440 /* Handle VMOV instructions. */
11441 if ((opcode & 0x1e) == 0x04)
11442 {
11443 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11444 {
11445 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11446 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11447 arm_insn_r->reg_rec_count = 2;
11448 }
11449 else
11450 {
11451 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11452 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11453
11454 if (single_reg)
11455 {
11456 /* The first S register number m is REG_M:M (M is bit 5),
11457 the corresponding D register number is REG_M:M / 2, which
11458 is REG_M. */
11459 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11460 /* The second S register number is REG_M:M + 1, the
11461 corresponding D register number is (REG_M:M + 1) / 2.
11462 IOW, if bit M is 1, the first and second S registers
11463 are mapped to different D registers, otherwise, they are
11464 in the same D register. */
11465 if (bit_m)
11466 {
11467 record_buf[arm_insn_r->reg_rec_count++]
11468 = ARM_D0_REGNUM + reg_m + 1;
11469 }
11470 }
11471 else
11472 {
11473 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11474 arm_insn_r->reg_rec_count = 1;
11475 }
11476 }
11477 }
11478 /* Handle VSTM and VPUSH instructions. */
11479 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11480 || op_vldm_vstm == 0x12)
11481 {
11482 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11483 uint32_t memory_index = 0;
11484
11485 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11486 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11487 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11488 imm_off32 = imm_off8 << 2;
11489 memory_count = imm_off8;
11490
11491 if (bit (arm_insn_r->arm_insn, 23))
11492 start_address = u_regval;
11493 else
11494 start_address = u_regval - imm_off32;
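      /* Illustrative example: VPUSH {s8-s11} encodes imm8 == 4, so
	 imm_off32 == 16 and the four words just below Rn (SP) are the
	 memory locations that get recorded.  */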
11495
11496 if (bit (arm_insn_r->arm_insn, 21))
11497 {
11498 record_buf[0] = reg_rn;
11499 arm_insn_r->reg_rec_count = 1;
11500 }
11501
11502 while (memory_count > 0)
11503 {
11504 if (single_reg)
11505 {
11506 record_buf_mem[memory_index] = 4;
11507 record_buf_mem[memory_index + 1] = start_address;
11508 start_address = start_address + 4;
11509 memory_index = memory_index + 2;
11510 }
11511 else
11512 {
11513 record_buf_mem[memory_index] = 4;
11514 record_buf_mem[memory_index + 1] = start_address;
11515 record_buf_mem[memory_index + 2] = 4;
11516 record_buf_mem[memory_index + 3] = start_address + 4;
11517 start_address = start_address + 8;
11518 memory_index = memory_index + 4;
11519 }
11520 memory_count--;
11521 }
11522 arm_insn_r->mem_rec_count = (memory_index >> 1);
11523 }
11524 /* Handle VLDM instructions. */
11525 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11526 || op_vldm_vstm == 0x13)
11527 {
11528 uint32_t reg_count, reg_vd;
11529 uint32_t reg_index = 0;
11530 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11531
11532 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11533 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11534
11535 /* REG_VD is the first D register number. If the instruction
11536 loads memory to S registers (SINGLE_REG is TRUE), the register
11537 number is (REG_VD << 1 | bit D), so the corresponding D
11538 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11539 if (!single_reg)
11540 reg_vd = reg_vd | (bit_d << 4);
11541
11542 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11543 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11544
11545 /* If the instruction loads memory into D registers, REG_COUNT should
11546 be divided by 2, according to the ARM Architecture Reference
11547 Manual. If the instruction loads memory into S registers, divide by
11548 2 as well, because two S registers map to one D register. */
11549 reg_count = reg_count / 2;
11550 if (single_reg && bit_d)
11551 {
11552 /* Increase the register count if S register list starts from
11553 an odd number (bit d is one). */
11554 reg_count++;
11555 }
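      /* Illustrative example: VLDM r0, {s1-s4} encodes Vd:D == 1 and
	 imm8 == 4, so REG_COUNT becomes 4 / 2 + 1 == 3 and the D registers
	 d0, d1 and d2 (which contain s1-s4) are recorded.  */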
11556
11557 while (reg_count > 0)
11558 {
11559 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11560 reg_count--;
11561 }
11562 arm_insn_r->reg_rec_count = reg_index;
11563 }
11564 /* VSTR Vector store register. */
11565 else if ((opcode & 0x13) == 0x10)
11566 {
11567 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11568 uint32_t memory_index = 0;
11569
11570 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11571 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11572 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11573 imm_off32 = imm_off8 << 2;
11574
11575 if (bit (arm_insn_r->arm_insn, 23))
11576 start_address = u_regval + imm_off32;
11577 else
11578 start_address = u_regval - imm_off32;
11579
11580 if (single_reg)
11581 {
11582 record_buf_mem[memory_index] = 4;
11583 record_buf_mem[memory_index + 1] = start_address;
11584 arm_insn_r->mem_rec_count = 1;
11585 }
11586 else
11587 {
11588 record_buf_mem[memory_index] = 4;
11589 record_buf_mem[memory_index + 1] = start_address;
11590 record_buf_mem[memory_index + 2] = 4;
11591 record_buf_mem[memory_index + 3] = start_address + 4;
11592 arm_insn_r->mem_rec_count = 2;
11593 }
11594 }
11595 /* VLDR Vector load register. */
11596 else if ((opcode & 0x13) == 0x11)
11597 {
11598 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11599
11600 if (!single_reg)
11601 {
11602 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11603 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11604 }
11605 else
11606 {
11607 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11608 /* Record register D rather than pseudo register S. */
11609 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11610 }
11611 arm_insn_r->reg_rec_count = 1;
11612 }
11613
11614 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11615 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11616 return 0;
11617 }
11618
11619 /* Record handler for arm/thumb mode VFP data processing instructions. */
11620
11621 static int
11622 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11623 {
11624 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11625 uint32_t record_buf[4];
11626 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11627 enum insn_types curr_insn_type = INSN_INV;
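  /* As used in the switch at the end of this function: INSN_T0 records a
     pair of D registers, INSN_T1 a single double-precision destination,
     INSN_T2 a single single-precision destination, and INSN_T3 only
     FPSCR.  */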
11628
11629 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11630 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11631 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11632 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11633 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11634 bit_d = bit (arm_insn_r->arm_insn, 22);
11635 /* Mask off the "D" bit. */
11636 opc1 = opc1 & ~0x04;
11637
11638 /* Handle VMLA, VMLS. */
11639 if (opc1 == 0x00)
11640 {
11641 if (bit (arm_insn_r->arm_insn, 10))
11642 {
11643 if (bit (arm_insn_r->arm_insn, 6))
11644 curr_insn_type = INSN_T0;
11645 else
11646 curr_insn_type = INSN_T1;
11647 }
11648 else
11649 {
11650 if (dp_op_sz)
11651 curr_insn_type = INSN_T1;
11652 else
11653 curr_insn_type = INSN_T2;
11654 }
11655 }
11656 /* Handle VNMLA, VNMLS, VNMUL. */
11657 else if (opc1 == 0x01)
11658 {
11659 if (dp_op_sz)
11660 curr_insn_type = INSN_T1;
11661 else
11662 curr_insn_type = INSN_T2;
11663 }
11664 /* Handle VMUL. */
11665 else if (opc1 == 0x02 && !(opc3 & 0x01))
11666 {
11667 if (bit (arm_insn_r->arm_insn, 10))
11668 {
11669 if (bit (arm_insn_r->arm_insn, 6))
11670 curr_insn_type = INSN_T0;
11671 else
11672 curr_insn_type = INSN_T1;
11673 }
11674 else
11675 {
11676 if (dp_op_sz)
11677 curr_insn_type = INSN_T1;
11678 else
11679 curr_insn_type = INSN_T2;
11680 }
11681 }
11682 /* Handle VADD, VSUB. */
11683 else if (opc1 == 0x03)
11684 {
11685 if (!bit (arm_insn_r->arm_insn, 9))
11686 {
11687 if (bit (arm_insn_r->arm_insn, 6))
11688 curr_insn_type = INSN_T0;
11689 else
11690 curr_insn_type = INSN_T1;
11691 }
11692 else
11693 {
11694 if (dp_op_sz)
11695 curr_insn_type = INSN_T1;
11696 else
11697 curr_insn_type = INSN_T2;
11698 }
11699 }
11700 /* Handle VDIV. */
11701 else if (opc1 == 0x08)
11702 {
11703 if (dp_op_sz)
11704 curr_insn_type = INSN_T1;
11705 else
11706 curr_insn_type = INSN_T2;
11707 }
11708 /* Handle all other vfp data processing instructions. */
11709 else if (opc1 == 0x0b)
11710 {
11711 /* Handle VMOV. */
11712 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11713 {
11714 if (bit (arm_insn_r->arm_insn, 4))
11715 {
11716 if (bit (arm_insn_r->arm_insn, 6))
11717 curr_insn_type = INSN_T0;
11718 else
11719 curr_insn_type = INSN_T1;
11720 }
11721 else
11722 {
11723 if (dp_op_sz)
11724 curr_insn_type = INSN_T1;
11725 else
11726 curr_insn_type = INSN_T2;
11727 }
11728 }
11729 /* Handle VNEG and VABS. */
11730 else if ((opc2 == 0x01 && opc3 == 0x01)
11731 || (opc2 == 0x00 && opc3 == 0x03))
11732 {
11733 if (!bit (arm_insn_r->arm_insn, 11))
11734 {
11735 if (bit (arm_insn_r->arm_insn, 6))
11736 curr_insn_type = INSN_T0;
11737 else
11738 curr_insn_type = INSN_T1;
11739 }
11740 else
11741 {
11742 if (dp_op_sz)
11743 curr_insn_type = INSN_T1;
11744 else
11745 curr_insn_type = INSN_T2;
11746 }
11747 }
11748 /* Handle VSQRT. */
11749 else if (opc2 == 0x01 && opc3 == 0x03)
11750 {
11751 if (dp_op_sz)
11752 curr_insn_type = INSN_T1;
11753 else
11754 curr_insn_type = INSN_T2;
11755 }
11756 /* Handle VCVT. */
11757 else if (opc2 == 0x07 && opc3 == 0x03)
11758 {
11759 if (!dp_op_sz)
11760 curr_insn_type = INSN_T1;
11761 else
11762 curr_insn_type = INSN_T2;
11763 }
11764 else if (opc3 & 0x01)
11765 {
11766 /* Handle VCVT. */
11767 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11768 {
11769 if (!bit (arm_insn_r->arm_insn, 18))
11770 curr_insn_type = INSN_T2;
11771 else
11772 {
11773 if (dp_op_sz)
11774 curr_insn_type = INSN_T1;
11775 else
11776 curr_insn_type = INSN_T2;
11777 }
11778 }
11779 /* Handle VCVT. */
11780 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11781 {
11782 if (dp_op_sz)
11783 curr_insn_type = INSN_T1;
11784 else
11785 curr_insn_type = INSN_T2;
11786 }
11787 /* Handle VCVTB, VCVTT. */
11788 else if ((opc2 & 0x0e) == 0x02)
11789 curr_insn_type = INSN_T2;
11790 /* Handle VCMP, VCMPE. */
11791 else if ((opc2 & 0x0e) == 0x04)
11792 curr_insn_type = INSN_T3;
11793 }
11794 }
11795
11796 switch (curr_insn_type)
11797 {
11798 case INSN_T0:
11799 reg_vd = reg_vd | (bit_d << 4);
11800 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11801 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11802 arm_insn_r->reg_rec_count = 2;
11803 break;
11804
11805 case INSN_T1:
11806 reg_vd = reg_vd | (bit_d << 4);
11807 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11808 arm_insn_r->reg_rec_count = 1;
11809 break;
11810
11811 case INSN_T2:
11812 reg_vd = (reg_vd << 1) | bit_d;
11813 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11814 arm_insn_r->reg_rec_count = 1;
11815 break;
11816
11817 case INSN_T3:
11818 record_buf[0] = ARM_FPSCR_REGNUM;
11819 arm_insn_r->reg_rec_count = 1;
11820 break;
11821
11822 default:
11823 gdb_assert_not_reached ("no decoding pattern found");
11824 break;
11825 }
11826
11827 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11828 return 0;
11829 }
11830
11831 /* Handling opcode 110 insns. */
11832
11833 static int
11834 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11835 {
11836 uint32_t op1, op1_ebit, coproc;
11837
11838 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11839 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11840 op1_ebit = bit (arm_insn_r->arm_insn, 20);
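  /* Coprocessor numbers 10 and 11 (0b101x) are the VFP/Advanced SIMD
     coprocessors, hence the (coproc & 0x0e) == 0x0a tests below.  */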
11841
11842 if ((coproc & 0x0e) == 0x0a)
11843 {
11844 /* Handle extension register ld/st instructions. */
11845 if (!(op1 & 0x20))
11846 return arm_record_exreg_ld_st_insn (arm_insn_r);
11847
11848 /* 64-bit transfers between arm core and extension registers. */
11849 if ((op1 & 0x3e) == 0x04)
11850 return arm_record_exreg_ld_st_insn (arm_insn_r);
11851 }
11852 else
11853 {
11854 /* Handle coprocessor ld/st instructions. */
11855 if (!(op1 & 0x3a))
11856 {
11857 /* Store. */
11858 if (!op1_ebit)
11859 return arm_record_unsupported_insn (arm_insn_r);
11860 else
11861 /* Load. */
11862 return arm_record_unsupported_insn (arm_insn_r);
11863 }
11864
11865 /* Move to coprocessor from two arm core registers. */
11866 if (op1 == 0x4)
11867 return arm_record_unsupported_insn (arm_insn_r);
11868
11869 /* Move to two arm core registers from coprocessor. */
11870 if (op1 == 0x5)
11871 {
11872 uint32_t reg_t[2];
11873
11874 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11875 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11876 arm_insn_r->reg_rec_count = 2;
11877
11878 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11879 return 0;
11880 }
11881 }
11882 return arm_record_unsupported_insn (arm_insn_r);
11883 }
11884
11885 /* Handling opcode 111 insns. */
11886
11887 static int
11888 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11889 {
11890 uint32_t op, op1_ebit, coproc, bits_24_25;
11891 arm_gdbarch_tdep *tdep
11892 = (arm_gdbarch_tdep *) gdbarch_tdep (arm_insn_r->gdbarch);
11893 struct regcache *reg_cache = arm_insn_r->regcache;
11894
11895 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11896 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11897 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11898 op = bit (arm_insn_r->arm_insn, 4);
11899 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11900
11901 /* Handle arm SWI/SVC system call instructions. */
11902 if (bits_24_25 == 0x3)
11903 {
11904 if (tdep->arm_syscall_record != NULL)
11905 {
11906 ULONGEST svc_operand, svc_number;
11907
11908 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11909
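	  /* Under the old OABI the syscall number is encoded in the SVC
	     immediate (biased by 0x900000); under EABI the immediate is zero
	     and the number is passed in r7.  */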
11910 if (svc_operand) /* OABI. */
11911 svc_number = svc_operand - 0x900000;
11912 else /* EABI. */
11913 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11914
11915 return tdep->arm_syscall_record (reg_cache, svc_number);
11916 }
11917 else
11918 {
11919 printf_unfiltered (_("no syscall record support\n"));
11920 return -1;
11921 }
11922 }
11923 else if (bits_24_25 == 0x02)
11924 {
11925 if (op)
11926 {
11927 if ((coproc & 0x0e) == 0x0a)
11928 {
11929 /* 8, 16, and 32-bit transfer */
11930 return arm_record_vdata_transfer_insn (arm_insn_r);
11931 }
11932 else
11933 {
11934 if (op1_ebit)
11935 {
11936 /* MRC, MRC2 */
11937 uint32_t record_buf[1];
11938
11939 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11940 if (record_buf[0] == 15)
11941 record_buf[0] = ARM_PS_REGNUM;
11942
11943 arm_insn_r->reg_rec_count = 1;
11944 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11945 record_buf);
11946 return 0;
11947 }
11948 else
11949 {
11950 /* MCR, MCR2 */
11951 return -1;
11952 }
11953 }
11954 }
11955 else
11956 {
11957 if ((coproc & 0x0e) == 0x0a)
11958 {
11959 /* VFP data-processing instructions. */
11960 return arm_record_vfp_data_proc_insn (arm_insn_r);
11961 }
11962 else
11963 {
11964 /* CDP, CDP2 */
11965 return -1;
11966 }
11967 }
11968 }
11969 else
11970 {
11971 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11972
11973 if (op1 == 5)
11974 {
11975 if ((coproc & 0x0e) != 0x0a)
11976 {
11977 /* MRRC, MRRC2 */
11978 return -1;
11979 }
11980 }
11981 else if (op1 == 4 || op1 == 5)
11982 {
11983 if ((coproc & 0x0e) == 0x0a)
11984 {
11985 /* 64-bit transfers between ARM core and extension */
11986 return -1;
11987 }
11988 else if (op1 == 4)
11989 {
11990 /* MCRR, MCRR2 */
11991 return -1;
11992 }
11993 }
11994 else if (op1 == 0 || op1 == 1)
11995 {
11996 /* UNDEFINED */
11997 return -1;
11998 }
11999 else
12000 {
12001 if ((coproc & 0x0e) == 0x0a)
12002 {
12003 /* Extension register load/store */
12004 }
12005 else
12006 {
12007 /* STC, STC2, LDC, LDC2 */
12008 }
12009 return -1;
12010 }
12011 }
12012
12013 return -1;
12014 }
12015
12016 /* Handling opcode 000 insns. */
12017
12018 static int
12019 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12020 {
12021 uint32_t record_buf[8];
12022 uint32_t reg_src1 = 0;
12023
12024 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12025
12026 record_buf[0] = ARM_PS_REGNUM;
12027 record_buf[1] = reg_src1;
12028 thumb_insn_r->reg_rec_count = 2;
12029
12030 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12031
12032 return 0;
12033 }
12034
12035
12036 /* Handling opcode 001 insns. */
12037
12038 static int
12039 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12040 {
12041 uint32_t record_buf[8];
12042 uint32_t reg_src1 = 0;
12043
12044 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12045
12046 record_buf[0] = ARM_PS_REGNUM;
12047 record_buf[1] = reg_src1;
12048 thumb_insn_r->reg_rec_count = 2;
12049
12050 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12051
12052 return 0;
12053 }
12054
12055 /* Handling opcode 010 insns. */
12056
12057 static int
12058 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12059 {
12060 struct regcache *reg_cache = thumb_insn_r->regcache;
12061 uint32_t record_buf[8], record_buf_mem[8];
12062
12063 uint32_t reg_src1 = 0, reg_src2 = 0;
12064 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12065
12066 ULONGEST u_regval[2] = {0};
12067
12068 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12069
12070 if (bit (thumb_insn_r->arm_insn, 12))
12071 {
12072 /* Handle load/store register offset. */
12073 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
12074
12075 if (in_inclusive_range (opB, 4U, 7U))
12076 {
12077 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
12078 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12079 record_buf[0] = reg_src1;
12080 thumb_insn_r->reg_rec_count = 1;
12081 }
12082 else if (in_inclusive_range (opB, 0U, 2U))
12083 {
12084 /* STR(2), STRB(2), STRH(2). */
12085 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12086 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12087 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12088 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12089 if (0 == opB)
12090 record_buf_mem[0] = 4; /* STR (2). */
12091 else if (2 == opB)
12092 record_buf_mem[0] = 1; /* STRB (2). */
12093 else if (1 == opB)
12094 record_buf_mem[0] = 2; /* STRH (2). */
12095 record_buf_mem[1] = u_regval[0] + u_regval[1];
12096 thumb_insn_r->mem_rec_count = 1;
12097 }
12098 }
12099 else if (bit (thumb_insn_r->arm_insn, 11))
12100 {
12101 /* Handle load from literal pool. */
12102 /* LDR(3). */
12103 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12104 record_buf[0] = reg_src1;
12105 thumb_insn_r->reg_rec_count = 1;
12106 }
12107 else if (opcode1)
12108 {
12109 /* Special data instructions and branch and exchange */
12110 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12111 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12112 if ((3 == opcode2) && (!opcode3))
12113 {
12114 /* Branch with exchange. */
12115 record_buf[0] = ARM_PS_REGNUM;
12116 thumb_insn_r->reg_rec_count = 1;
12117 }
12118 else
12119 {
12120 /* Format 8; special data processing insns. */
12121 record_buf[0] = ARM_PS_REGNUM;
12122 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
12123 | bits (thumb_insn_r->arm_insn, 0, 2));
12124 thumb_insn_r->reg_rec_count = 2;
12125 }
12126 }
12127 else
12128 {
12129 /* Format 5; data processing insns. */
12130 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12131 if (bit (thumb_insn_r->arm_insn, 7))
12132 {
12133 reg_src1 = reg_src1 + 8;
12134 }
12135 record_buf[0] = ARM_PS_REGNUM;
12136 record_buf[1] = reg_src1;
12137 thumb_insn_r->reg_rec_count = 2;
12138 }
12139
12140 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12141 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12142 record_buf_mem);
12143
12144 return 0;
12145 }
12146
12147 /* Handling opcode 001 insns. */
12148
12149 static int
12150 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12151 {
12152 struct regcache *reg_cache = thumb_insn_r->regcache;
12153 uint32_t record_buf[8], record_buf_mem[8];
12154
12155 uint32_t reg_src1 = 0;
12156 uint32_t opcode = 0, immed_5 = 0;
12157
12158 ULONGEST u_regval = 0;
12159
12160 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12161
12162 if (opcode)
12163 {
12164 /* LDR(1). */
12165 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12166 record_buf[0] = reg_src1;
12167 thumb_insn_r->reg_rec_count = 1;
12168 }
12169 else
12170 {
12171 /* STR(1). */
12172 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12173 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12174 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12175 record_buf_mem[0] = 4;
12176 record_buf_mem[1] = u_regval + (immed_5 * 4);
12177 thumb_insn_r->mem_rec_count = 1;
12178 }
12179
12180 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12181 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12182 record_buf_mem);
12183
12184 return 0;
12185 }
12186
12187 /* Handling opcode 100 insns. */
12188
12189 static int
12190 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12191 {
12192 struct regcache *reg_cache = thumb_insn_r->regcache;
12193 uint32_t record_buf[8], record_buf_mem[8];
12194
12195 uint32_t reg_src1 = 0;
12196 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12197
12198 ULONGEST u_regval = 0;
12199
12200 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12201
12202 if (3 == opcode)
12203 {
12204 /* LDR(4). */
12205 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12206 record_buf[0] = reg_src1;
12207 thumb_insn_r->reg_rec_count = 1;
12208 }
12209 else if (1 == opcode)
12210 {
12211 /* LDRH(1). */
12212 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12213 record_buf[0] = reg_src1;
12214 thumb_insn_r->reg_rec_count = 1;
12215 }
12216 else if (2 == opcode)
12217 {
12218 /* STR(3). */
12219 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12220 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12221 record_buf_mem[0] = 4;
12222 record_buf_mem[1] = u_regval + (immed_8 * 4);
12223 thumb_insn_r->mem_rec_count = 1;
12224 }
12225 else if (0 == opcode)
12226 {
12227 /* STRH(1). */
12228 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12229 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12230 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12231 record_buf_mem[0] = 2;
12232 record_buf_mem[1] = u_regval + (immed_5 * 2);
12233 thumb_insn_r->mem_rec_count = 1;
12234 }
12235
12236 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12237 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12238 record_buf_mem);
12239
12240 return 0;
12241 }
12242
12243 /* Handling opcode 101 insns. */
12244
12245 static int
12246 thumb_record_misc (insn_decode_record *thumb_insn_r)
12247 {
12248 struct regcache *reg_cache = thumb_insn_r->regcache;
12249
12250 uint32_t opcode = 0;
12251 uint32_t register_bits = 0, register_count = 0;
12252 uint32_t index = 0, start_address = 0;
12253 uint32_t record_buf[24], record_buf_mem[48];
12254 uint32_t reg_src1;
12255
12256 ULONGEST u_regval = 0;
12257
12258 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12259
12260 if (opcode == 0 || opcode == 1)
12261 {
12262 /* ADR and ADD (SP plus immediate) */
12263
12264 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12265 record_buf[0] = reg_src1;
12266 thumb_insn_r->reg_rec_count = 1;
12267 }
12268 else
12269 {
12270 /* Miscellaneous 16-bit instructions */
12271 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12272
12273 switch (opcode2)
12274 {
12275 case 6:
12276 /* SETEND and CPS */
12277 break;
12278 case 0:
12279 /* ADD/SUB (SP plus immediate) */
12280 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12281 record_buf[0] = ARM_SP_REGNUM;
12282 thumb_insn_r->reg_rec_count = 1;
12283 break;
12284 case 1: /* fall through */
12285 case 3: /* fall through */
12286 case 9: /* fall through */
12287 case 11:
12288 /* CBNZ, CBZ */
12289 break;
12290 case 2:
12291 /* SXTH, SXTB, UXTH, UXTB */
12292 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12293 thumb_insn_r->reg_rec_count = 1;
12294 break;
12295 case 4: /* fall through */
12296 case 5:
12297 /* PUSH. */
12298 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12299 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12300 while (register_bits)
12301 {
12302 if (register_bits & 0x00000001)
12303 register_count++;
12304 register_bits = register_bits >> 1;
12305 }
12306 start_address = u_regval - \
12307 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
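	  /* Illustrative example: PUSH {r4-r7, lr} sets bit 8 (M) and lists
	     four low registers, so START_ADDRESS == SP - 4 * (1 + 4)
	     == SP - 20.  */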
12308 thumb_insn_r->mem_rec_count = register_count;
12309 while (register_count)
12310 {
12311 record_buf_mem[(register_count * 2) - 1] = start_address;
12312 record_buf_mem[(register_count * 2) - 2] = 4;
12313 start_address = start_address + 4;
12314 register_count--;
12315 }
12316 record_buf[0] = ARM_SP_REGNUM;
12317 thumb_insn_r->reg_rec_count = 1;
12318 break;
12319 case 10:
12320 /* REV, REV16, REVSH */
12321 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12322 thumb_insn_r->reg_rec_count = 1;
12323 break;
12324 case 12: /* fall through */
12325 case 13:
12326 /* POP. */
12327 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12328 while (register_bits)
12329 {
12330 if (register_bits & 0x00000001)
12331 record_buf[index++] = register_count;
12332 register_bits = register_bits >> 1;
12333 register_count++;
12334 }
12335 record_buf[index++] = ARM_PS_REGNUM;
12336 record_buf[index++] = ARM_SP_REGNUM;
12337 thumb_insn_r->reg_rec_count = index;
12338 break;
12339 case 0xe:
12340 /* BKPT insn. */
12341 /* Handle the enhanced software breakpoint insn, BKPT. */
12342 /* CPSR is changed so that execution resumes in ARM state with normal
12343 interrupts disabled, entering Abort mode. */
12344 /* The PC is set according to the high-vector configuration. */
12345 /* If the user hits the breakpoint and then reverses execution, we need
12346 to go back with the previous CPSR and program counter. */
12347 record_buf[0] = ARM_PS_REGNUM;
12348 record_buf[1] = ARM_LR_REGNUM;
12349 thumb_insn_r->reg_rec_count = 2;
12350 /* We need to save SPSR value, which is not yet done. */
12351 printf_unfiltered (_("Process record does not support instruction "
12352 "0x%0x at address %s.\n"),
12353 thumb_insn_r->arm_insn,
12354 paddress (thumb_insn_r->gdbarch,
12355 thumb_insn_r->this_addr));
12356 return -1;
12357
12358 case 0xf:
12359 /* If-Then, and hints */
12360 break;
12361 default:
12362 return -1;
12363 };
12364 }
12365
12366 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12367 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12368 record_buf_mem);
12369
12370 return 0;
12371 }
12372
12373 /* Handling opcode 110 insns. */
12374
12375 static int
12376 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12377 {
12378 arm_gdbarch_tdep *tdep
12379 = (arm_gdbarch_tdep *) gdbarch_tdep (thumb_insn_r->gdbarch);
12380 struct regcache *reg_cache = thumb_insn_r->regcache;
12381
12382 uint32_t ret = 0; /* Function return value: -1 = record failure, 0 = success. */
12383 uint32_t reg_src1 = 0;
12384 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12385 uint32_t index = 0, start_address = 0;
12386 uint32_t record_buf[24], record_buf_mem[48];
12387
12388 ULONGEST u_regval = 0;
12389
12390 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12391 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12392
12393 if (1 == opcode2)
12394 {
12395
12396 /* LDMIA. */
12397 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12398 /* Get Rn. */
12399 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12400 while (register_bits)
12401 {
12402 if (register_bits & 0x00000001)
12403 record_buf[index++] = register_count;
12404 register_bits = register_bits >> 1;
12405 register_count++;
12406 }
12407 record_buf[index++] = reg_src1;
12408 thumb_insn_r->reg_rec_count = index;
12409 }
12410 else if (0 == opcode2)
12411 {
12412 /* Handle STMIA. */
12413 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12414 /* Get Rn. */
12415 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12416 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12417 while (register_bits)
12418 {
12419 if (register_bits & 0x00000001)
12420 register_count++;
12421 register_bits = register_bits >> 1;
12422 }
12423 start_address = u_regval;
12424 thumb_insn_r->mem_rec_count = register_count;
12425 while (register_count)
12426 {
12427 record_buf_mem[(register_count * 2) - 1] = start_address;
12428 record_buf_mem[(register_count * 2) - 2] = 4;
12429 start_address = start_address + 4;
12430 register_count--;
12431 }
12432 }
12433 else if (0x1F == opcode1)
12434 {
12435 /* Handle arm syscall insn. */
12436 if (tdep->arm_syscall_record != NULL)
12437 {
12438 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12439 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12440 }
12441 else
12442 {
12443 printf_unfiltered (_("no syscall record support\n"));
12444 return -1;
12445 }
12446 }
12447
12448 /* B(1), the conditional branch, is automatically taken care of in
12449 process_record, as the PC is saved there. */
12450
12451 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12452 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12453 record_buf_mem);
12454
12455 return ret;
12456 }
12457
12458 /* Handling opcode 111 insns. */
12459
12460 static int
12461 thumb_record_branch (insn_decode_record *thumb_insn_r)
12462 {
12463 uint32_t record_buf[8];
12464 uint32_t bits_h = 0;
12465
12466 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12467
12468 if (2 == bits_h || 3 == bits_h)
12469 {
12470 /* BL */
12471 record_buf[0] = ARM_LR_REGNUM;
12472 thumb_insn_r->reg_rec_count = 1;
12473 }
12474 else if (1 == bits_h)
12475 {
12476 /* BLX(1). */
12477 record_buf[0] = ARM_PS_REGNUM;
12478 record_buf[1] = ARM_LR_REGNUM;
12479 thumb_insn_r->reg_rec_count = 2;
12480 }
12481
12482 /* B(2) is automatically taken care of in process_record, as the PC
12483 is saved there. */
12484
12485 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12486
12487 return 0;
12488 }
12489
12490 /* Handler for thumb2 load/store multiple instructions. */
12491
12492 static int
12493 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12494 {
12495 struct regcache *reg_cache = thumb2_insn_r->regcache;
12496
12497 uint32_t reg_rn, op;
12498 uint32_t register_bits = 0, register_count = 0;
12499 uint32_t index = 0, start_address = 0;
12500 uint32_t record_buf[24], record_buf_mem[48];
12501
12502 ULONGEST u_regval = 0;
12503
12504 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12505 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12506
12507 if (0 == op || 3 == op)
12508 {
12509 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12510 {
12511 /* Handle RFE instruction. */
12512 record_buf[0] = ARM_PS_REGNUM;
12513 thumb2_insn_r->reg_rec_count = 1;
12514 }
12515 else
12516 {
12517 /* Handle SRS instruction after reading banked SP. */
12518 return arm_record_unsupported_insn (thumb2_insn_r);
12519 }
12520 }
12521 else if (1 == op || 2 == op)
12522 {
12523 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12524 {
12525 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12526 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12527 while (register_bits)
12528 {
12529 if (register_bits & 0x00000001)
12530 record_buf[index++] = register_count;
12531
12532 register_count++;
12533 register_bits = register_bits >> 1;
12534 }
12535 record_buf[index++] = reg_rn;
12536 record_buf[index++] = ARM_PS_REGNUM;
12537 thumb2_insn_r->reg_rec_count = index;
12538 }
12539 else
12540 {
12541 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12542 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12543 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12544 while (register_bits)
12545 {
12546 if (register_bits & 0x00000001)
12547 register_count++;
12548
12549 register_bits = register_bits >> 1;
12550 }
12551
12552 if (1 == op)
12553 {
12554 /* Start address calculation for STM/STMIA/STMEA. */
12555 start_address = u_regval;
12556 }
12557 else if (2 == op)
12558 {
12559 /* Start address calculation for STMDB/STMFD. */
12560 start_address = u_regval - register_count * 4;
12561 }
12562
12563 thumb2_insn_r->mem_rec_count = register_count;
12564 while (register_count)
12565 {
12566 record_buf_mem[register_count * 2 - 1] = start_address;
12567 record_buf_mem[register_count * 2 - 2] = 4;
12568 start_address = start_address + 4;
12569 register_count--;
12570 }
12571 record_buf[0] = reg_rn;
12572 record_buf[1] = ARM_PS_REGNUM;
12573 thumb2_insn_r->reg_rec_count = 2;
12574 }
12575 }
12576
12577 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12578 record_buf_mem);
12579 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12580 record_buf);
12581 return ARM_RECORD_SUCCESS;
12582 }
12583
12584 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12585 instructions. */
12586
12587 static int
12588 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12589 {
12590 struct regcache *reg_cache = thumb2_insn_r->regcache;
12591
12592 uint32_t reg_rd, reg_rn, offset_imm;
12593 uint32_t reg_dest1, reg_dest2;
12594 uint32_t address, offset_addr;
12595 uint32_t record_buf[8], record_buf_mem[8];
12596 uint32_t op1, op2, op3;
12597
12598 ULONGEST u_regval[2];
12599
12600 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12601 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12602 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12603
12604 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12605 {
12606 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12607 {
12608 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12609 record_buf[0] = reg_dest1;
12610 record_buf[1] = ARM_PS_REGNUM;
12611 thumb2_insn_r->reg_rec_count = 2;
12612 }
12613
12614 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12615 {
12616 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12617 record_buf[2] = reg_dest2;
12618 thumb2_insn_r->reg_rec_count = 3;
12619 }
12620 }
12621 else
12622 {
12623 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12624 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12625
12626 if (0 == op1 && 0 == op2)
12627 {
12628 /* Handle STREX. */
12629 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12630 address = u_regval[0] + (offset_imm * 4);
12631 record_buf_mem[0] = 4;
12632 record_buf_mem[1] = address;
12633 thumb2_insn_r->mem_rec_count = 1;
12634 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12635 record_buf[0] = reg_rd;
12636 thumb2_insn_r->reg_rec_count = 1;
12637 }
12638 else if (1 == op1 && 0 == op2)
12639 {
12640 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12641 record_buf[0] = reg_rd;
12642 thumb2_insn_r->reg_rec_count = 1;
12643 address = u_regval[0];
12644 record_buf_mem[1] = address;
12645
12646 if (4 == op3)
12647 {
12648 /* Handle STREXB. */
12649 record_buf_mem[0] = 1;
12650 thumb2_insn_r->mem_rec_count = 1;
12651 }
12652 else if (5 == op3)
12653 {
12654 /* Handle STREXH. */
12655 record_buf_mem[0] = 2;
12656 thumb2_insn_r->mem_rec_count = 1;
12657 }
12658 else if (7 == op3)
12659 {
12660 /* Handle STREXD. */
12661 address = u_regval[0];
12662 record_buf_mem[0] = 4;
12663 record_buf_mem[2] = 4;
12664 record_buf_mem[3] = address + 4;
12665 thumb2_insn_r->mem_rec_count = 2;
12666 }
12667 }
12668 else
12669 {
12670 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12671
12672 if (bit (thumb2_insn_r->arm_insn, 24))
12673 {
12674 if (bit (thumb2_insn_r->arm_insn, 23))
12675 offset_addr = u_regval[0] + (offset_imm * 4);
12676 else
12677 offset_addr = u_regval[0] - (offset_imm * 4);
12678
12679 address = offset_addr;
12680 }
12681 else
12682 address = u_regval[0];
12683
12684 record_buf_mem[0] = 4;
12685 record_buf_mem[1] = address;
12686 record_buf_mem[2] = 4;
12687 record_buf_mem[3] = address + 4;
12688 thumb2_insn_r->mem_rec_count = 2;
12689 record_buf[0] = reg_rn;
12690 thumb2_insn_r->reg_rec_count = 1;
12691 }
12692 }
12693
12694 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12695 record_buf);
12696 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12697 record_buf_mem);
12698 return ARM_RECORD_SUCCESS;
12699 }
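
/* Worked example for the handler above (illustrative only): for
   STREX r0, r1, [r2] the T1 encoding is the halfword pair 0xe842 0x1000,
   which decode_insn (below) swaps into the single word 0xe8421000.  The
   L bit (INSN_S_L_BIT_NUM, bit 20 of the encoding) is clear, so this is a
   store; op1 = bits[24:23] = 0 and op2 = bits[21:20] = 0, so the STREX
   branch records a 4-byte write at the address in r2 (plus the zero
   immediate) together with the status result register.  */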
12700
12701 /* Handler for thumb2 data processing (shifted register and modified immediate)
12702 instructions. */
12703
12704 static int
12705 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12706 {
12707 uint32_t reg_rd, op;
12708 uint32_t record_buf[8];
12709
12710 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12711 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12712
12713 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12714 {
12715 record_buf[0] = ARM_PS_REGNUM;
12716 thumb2_insn_r->reg_rec_count = 1;
12717 }
12718 else
12719 {
12720 record_buf[0] = reg_rd;
12721 record_buf[1] = ARM_PS_REGNUM;
12722 thumb2_insn_r->reg_rec_count = 2;
12723 }
12724
12725 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12726 record_buf);
12727 return ARM_RECORD_SUCCESS;
12728 }
12729
12730 /* Generic handler for thumb2 instructions which affect the destination and PS
12731 registers. */
12732
12733 static int
12734 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12735 {
12736 uint32_t reg_rd;
12737 uint32_t record_buf[8];
12738
12739 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12740
12741 record_buf[0] = reg_rd;
12742 record_buf[1] = ARM_PS_REGNUM;
12743 thumb2_insn_r->reg_rec_count = 2;
12744
12745 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12746 record_buf);
12747 return ARM_RECORD_SUCCESS;
12748 }
12749
12750 /* Handler for thumb2 branch and miscellaneous control instructions. */
12751
12752 static int
12753 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12754 {
12755 uint32_t op, op1, op2;
12756 uint32_t record_buf[8];
12757
12758 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12759 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12760 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12761
12762 /* Handle MSR insn. */
12763 if (!(op1 & 0x2) && 0x38 == op)
12764 {
12765 if (!(op2 & 0x3))
12766 {
12767 /* CPSR is going to be changed. */
12768 record_buf[0] = ARM_PS_REGNUM;
12769 thumb2_insn_r->reg_rec_count = 1;
12770 }
12771 else
12772 {
12773 arm_record_unsupported_insn (thumb2_insn_r);
12774 return -1;
12775 }
12776 }
12777 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12778 {
12779 /* Handle BL and BLX. */
12780 record_buf[0] = ARM_PS_REGNUM;
12781 record_buf[1] = ARM_LR_REGNUM;
12782 thumb2_insn_r->reg_rec_count = 2;
12783 }
12784
12785 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12786 record_buf);
12787 return ARM_RECORD_SUCCESS;
12788 }
12789
12790 /* Handler for thumb2 store single data item instructions. */
12791
12792 static int
12793 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12794 {
12795 struct regcache *reg_cache = thumb2_insn_r->regcache;
12796
12797 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12798 uint32_t address, offset_addr;
12799 uint32_t record_buf[8], record_buf_mem[8];
12800 uint32_t op1, op2;
12801
12802 ULONGEST u_regval[2];
12803
12804 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12805 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12806 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12807 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12808
12809 if (bit (thumb2_insn_r->arm_insn, 23))
12810 {
12811 /* T2 encoding. */
12812 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12813 offset_addr = u_regval[0] + offset_imm;
12814 address = offset_addr;
12815 }
12816 else
12817 {
12818 /* T3 encoding. */
12819 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12820 {
12821 /* Handle STRB (register). */
12822 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12823 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12824 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12825 offset_addr = u_regval[1] << shift_imm;
12826 address = u_regval[0] + offset_addr;
12827 }
12828 else
12829 {
12830 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12831 if (bit (thumb2_insn_r->arm_insn, 10))
12832 {
12833 if (bit (thumb2_insn_r->arm_insn, 9))
12834 offset_addr = u_regval[0] + offset_imm;
12835 else
12836 offset_addr = u_regval[0] - offset_imm;
12837
12838 address = offset_addr;
12839 }
12840 else
12841 address = u_regval[0];
12842 }
12843 }
12844
12845 switch (op1)
12846 {
12847 /* Store byte instructions. */
12848 case 4:
12849 case 0:
12850 record_buf_mem[0] = 1;
12851 break;
12852 /* Store half word instructions. */
12853 case 1:
12854 case 5:
12855 record_buf_mem[0] = 2;
12856 break;
12857 /* Store word instructions. */
12858 case 2:
12859 case 6:
12860 record_buf_mem[0] = 4;
12861 break;
12862
12863 default:
12864 gdb_assert_not_reached ("no decoding pattern found");
12865 break;
12866 }
12867
12868 record_buf_mem[1] = address;
12869 thumb2_insn_r->mem_rec_count = 1;
12870 record_buf[0] = reg_rn;
12871 thumb2_insn_r->reg_rec_count = 1;
12872
12873 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12874 record_buf);
12875 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12876 record_buf_mem);
12877 return ARM_RECORD_SUCCESS;
12878 }
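
/* Worked example for the handler above (illustrative only): one 32-bit
   encoding of str.w r1, [r2, #4] is the halfword pair 0xf8c2 0x1004, i.e.
   0xf8c21004 once decode_insn has swapped the halfwords.  Bit 23 is set,
   so the 12-bit immediate path is taken and ADDRESS = r2 + 4;
   op1 = bits[23:21] = 6 selects the "store word" case, so a 4-byte write
   at that address is recorded along with the base register r2.  */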
12879
12880 /* Handler for thumb2 load byte/halfword and memory hints instructions. */
12881
12882 static int
12883 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12884 {
12885 uint32_t record_buf[8];
12886 uint32_t reg_rt, reg_rn;
12887
12888 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12889 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12890
12891 if (ARM_PC_REGNUM != reg_rt)
12892 {
12893 record_buf[0] = reg_rt;
12894 record_buf[1] = reg_rn;
12895 record_buf[2] = ARM_PS_REGNUM;
12896 thumb2_insn_r->reg_rec_count = 3;
12897
12898 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12899 record_buf);
12900 return ARM_RECORD_SUCCESS;
12901 }
12902
12903 return ARM_RECORD_FAILURE;
12904 }
12905
12906 /* Handler for thumb2 load word instructions. */
12907
12908 static int
12909 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12910 {
12911 uint32_t record_buf[8];
12912
12913 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12914 record_buf[1] = ARM_PS_REGNUM;
12915 thumb2_insn_r->reg_rec_count = 2;
12916
12917 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12918 record_buf);
12919 return ARM_RECORD_SUCCESS;
12920 }
12921
12922 /* Handler for thumb2 long multiply, long multiply accumulate, and
12923 divide instructions. */
12924
12925 static int
12926 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12927 {
12928 uint32_t opcode1 = 0, opcode2 = 0;
12929 uint32_t record_buf[8];
12930
12931 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12932 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12933
12934 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12935 {
12936 /* Handle the long multiply/accumulate instructions:
12937    SMULL, UMULL, SMLAL, UMLAL and their variants. */
12938 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12939 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12940 record_buf[2] = ARM_PS_REGNUM;
12941 thumb2_insn_r->reg_rec_count = 3;
12942 }
12943 else if (1 == opcode1 || 3 == opcode1)
12944 {
12945 /* Handle SDIV and UDIV. */
12946 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12947 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12948 record_buf[2] = ARM_PS_REGNUM;
12949 thumb2_insn_r->reg_rec_count = 3;
12950 }
12951 else
12952 return ARM_RECORD_FAILURE;
12953
12954 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12955 record_buf);
12956 return ARM_RECORD_SUCCESS;
12957 }
12958
12959 /* Record handler for thumb32 coprocessor instructions. */
12960
12961 static int
12962 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12963 {
12964 if (bit (thumb2_insn_r->arm_insn, 25))
12965 return arm_record_coproc_data_proc (thumb2_insn_r);
12966 else
12967 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12968 }
12969
12970 /* Record handler for Advanced SIMD structure load/store instructions. */
12971
12972 static int
12973 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12974 {
12975 struct regcache *reg_cache = thumb2_insn_r->regcache;
12976 uint32_t l_bit, a_bit, b_bits;
12977 uint32_t record_buf[128], record_buf_mem[128];
12978 uint32_t reg_rn, reg_vd, address, f_elem;
12979 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12980 uint8_t f_ebytes;
12981
12982 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12983 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12984 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12985 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12986 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12987 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12988 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12989 f_elem = 8 / f_ebytes;
12990
12991 if (!l_bit)
12992 {
12993 ULONGEST u_regval = 0;
12994 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12995 address = u_regval;
12996
12997 if (!a_bit)
12998 {
12999 /* Handle VST1. */
13000 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13001 {
13002 if (b_bits == 0x07)
13003 bf_regs = 1;
13004 else if (b_bits == 0x0a)
13005 bf_regs = 2;
13006 else if (b_bits == 0x06)
13007 bf_regs = 3;
13008 else if (b_bits == 0x02)
13009 bf_regs = 4;
13010 else
13011 bf_regs = 0;
13012
13013 for (index_r = 0; index_r < bf_regs; index_r++)
13014 {
13015 for (index_e = 0; index_e < f_elem; index_e++)
13016 {
13017 record_buf_mem[index_m++] = f_ebytes;
13018 record_buf_mem[index_m++] = address;
13019 address = address + f_ebytes;
13020 thumb2_insn_r->mem_rec_count += 1;
13021 }
13022 }
13023 }
13024 /* Handle VST2. */
13025 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13026 {
13027 if (b_bits == 0x09 || b_bits == 0x08)
13028 bf_regs = 1;
13029 else if (b_bits == 0x03)
13030 bf_regs = 2;
13031 else
13032 bf_regs = 0;
13033
13034 for (index_r = 0; index_r < bf_regs; index_r++)
13035 for (index_e = 0; index_e < f_elem; index_e++)
13036 {
13037 for (loop_t = 0; loop_t < 2; loop_t++)
13038 {
13039 record_buf_mem[index_m++] = f_ebytes;
13040 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13041 thumb2_insn_r->mem_rec_count += 1;
13042 }
13043 address = address + (2 * f_ebytes);
13044 }
13045 }
13046 /* Handle VST3. */
13047 else if ((b_bits & 0x0e) == 0x04)
13048 {
13049 for (index_e = 0; index_e < f_elem; index_e++)
13050 {
13051 for (loop_t = 0; loop_t < 3; loop_t++)
13052 {
13053 record_buf_mem[index_m++] = f_ebytes;
13054 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13055 thumb2_insn_r->mem_rec_count += 1;
13056 }
13057 address = address + (3 * f_ebytes);
13058 }
13059 }
13060 /* Handle VST4. */
13061 else if (!(b_bits & 0x0e))
13062 {
13063 for (index_e = 0; index_e < f_elem; index_e++)
13064 {
13065 for (loop_t = 0; loop_t < 4; loop_t++)
13066 {
13067 record_buf_mem[index_m++] = f_ebytes;
13068 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13069 thumb2_insn_r->mem_rec_count += 1;
13070 }
13071 address = address + (4 * f_ebytes);
13072 }
13073 }
13074 }
13075 else
13076 {
13077 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13078
13079 if (bft_size == 0x00)
13080 f_ebytes = 1;
13081 else if (bft_size == 0x01)
13082 f_ebytes = 2;
13083 else if (bft_size == 0x02)
13084 f_ebytes = 4;
13085 else
13086 f_ebytes = 0;
13087
13088 /* Handle VST1. */
13089 if (!(b_bits & 0x0b) || b_bits == 0x08)
13090 thumb2_insn_r->mem_rec_count = 1;
13091 /* Handle VST2. */
13092 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13093 thumb2_insn_r->mem_rec_count = 2;
13094 /* Handle VST3. */
13095 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13096 thumb2_insn_r->mem_rec_count = 3;
13097 /* Handle VST4. */
13098 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13099 thumb2_insn_r->mem_rec_count = 4;
13100
13101 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13102 {
13103 record_buf_mem[index_m * 2] = f_ebytes;
13104 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
13105 }
13106 }
13107 }
13108 else
13109 {
13110 if (!a_bit)
13111 {
13112 /* Handle VLD1. */
13113 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13114 thumb2_insn_r->reg_rec_count = 1;
13115 /* Handle VLD2. */
13116 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13117 thumb2_insn_r->reg_rec_count = 2;
13118 /* Handle VLD3. */
13119 else if ((b_bits & 0x0e) == 0x04)
13120 thumb2_insn_r->reg_rec_count = 3;
13121 /* Handle VLD4. */
13122 else if (!(b_bits & 0x0e))
13123 thumb2_insn_r->reg_rec_count = 4;
13124 }
13125 else
13126 {
13127 /* Handle VLD1. */
13128 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13129 thumb2_insn_r->reg_rec_count = 1;
13130 /* Handle VLD2. */
13131 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13132 thumb2_insn_r->reg_rec_count = 2;
13133 /* Handle VLD3. */
13134 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13135 thumb2_insn_r->reg_rec_count = 3;
13136 /* Handle VLD4. */
13137 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13138 thumb2_insn_r->reg_rec_count = 4;
13139
13140 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13141 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13142 }
13143 }
13144
13145 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13146 {
13147 record_buf[index_r] = reg_rn;
13148 thumb2_insn_r->reg_rec_count += 1;
13149 }
13150
13151 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13152 record_buf);
13153 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13154 record_buf_mem);
13155 return 0;
13156 }
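
/* Note on the element-size arithmetic above (descriptive comment only):
   F_EBYTES = 1 << size is the element size in bytes taken from bits [7:6]
   of the encoding, and F_ELEM = 8 / F_EBYTES is the number of such elements
   in one 64-bit D register.  For example, size = 0b10 gives 4-byte elements
   and two elements per register, so a VST1 of four registers records
   4 * 2 = 8 (length, address) pairs of 4 bytes each.  */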
13157
13158 /* Decodes the thumb2 instruction type and invokes its record handler. */
13159
13160 static int
13161 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13162 {
13163 uint32_t op, op1, op2;
13164
13165 op = bit (thumb2_insn_r->arm_insn, 15);
13166 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13167 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13168
13169 if (op1 == 0x01)
13170 {
13171 if (!(op2 & 0x64))
13172 {
13173 /* Load/store multiple instruction. */
13174 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13175 }
13176 else if ((op2 & 0x64) == 0x4)
13177 {
13178 /* Load/store (dual/exclusive) and table branch instruction. */
13179 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13180 }
13181 else if ((op2 & 0x60) == 0x20)
13182 {
13183 /* Data-processing (shifted register). */
13184 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13185 }
13186 else if (op2 & 0x40)
13187 {
13188 /* Co-processor instructions. */
13189 return thumb2_record_coproc_insn (thumb2_insn_r);
13190 }
13191 }
13192 else if (op1 == 0x02)
13193 {
13194 if (op)
13195 {
13196 /* Branches and miscellaneous control instructions. */
13197 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13198 }
13199 else if (op2 & 0x20)
13200 {
13201 /* Data-processing (plain binary immediate) instruction. */
13202 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13203 }
13204 else
13205 {
13206 /* Data-processing (modified immediate). */
13207 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13208 }
13209 }
13210 else if (op1 == 0x03)
13211 {
13212 if (!(op2 & 0x71))
13213 {
13214 /* Store single data item. */
13215 return thumb2_record_str_single_data (thumb2_insn_r);
13216 }
13217 else if (!((op2 & 0x71) ^ 0x10))
13218 {
13219 /* Advanced SIMD or structure load/store instructions. */
13220 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13221 }
13222 else if (!((op2 & 0x67) ^ 0x01))
13223 {
13224 /* Load byte, memory hints instruction. */
13225 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13226 }
13227 else if (!((op2 & 0x67) ^ 0x03))
13228 {
13229 /* Load halfword, memory hints instruction. */
13230 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13231 }
13232 else if (!((op2 & 0x67) ^ 0x05))
13233 {
13234 /* Load word instruction. */
13235 return thumb2_record_ld_word (thumb2_insn_r);
13236 }
13237 else if (!((op2 & 0x70) ^ 0x20))
13238 {
13239 /* Data-processing (register) instruction. */
13240 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13241 }
13242 else if (!((op2 & 0x78) ^ 0x30))
13243 {
13244 /* Multiply, multiply accumulate, abs diff instruction. */
13245 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13246 }
13247 else if (!((op2 & 0x78) ^ 0x38))
13248 {
13249 /* Long multiply, long multiply accumulate, and divide. */
13250 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13251 }
13252 else if (op2 & 0x40)
13253 {
13254 /* Co-processor instructions. */
13255 return thumb2_record_coproc_insn (thumb2_insn_r);
13256 }
13257 }
13258
13259 return -1;
13260 }
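
/* Worked example for the dispatcher above (illustrative only): the
   mrc 15, 0, r7, cr13, cr0, {3} instruction used by the self test below is
   the halfword pair 0xee1d 0x7f70, i.e. 0xee1d7f70 after the halfword swap.
   op1 = bits[28:27] = 1 and op2 = bits[26:20] = 0x61, so (op2 & 0x40) is
   non-zero and the insn is routed to thumb2_record_coproc_insn; there bit 25
   is set, so arm_record_coproc_data_proc finally records it.  */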
13261
13262 namespace {
13263 /* Abstract memory reader. */
13264
13265 class abstract_memory_reader
13266 {
13267 public:
13268 /* Read LEN bytes of target memory at address MEMADDR, placing the
13269 results in GDB's memory at BUF. Return true on success. */
13270
13271 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13272 };
13273
13274 /* Instruction reader from real target. */
13275
13276 class instruction_reader : public abstract_memory_reader
13277 {
13278 public:
13279 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13280 {
13281 if (target_read_memory (memaddr, buf, len))
13282 return false;
13283 else
13284 return true;
13285 }
13286 };
13287
13288 } // namespace
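
/* The abstract_memory_reader interface above lets decode_insn fetch
   instruction bytes either from the live target (instruction_reader) or from
   a canned buffer; instruction_reader_thumb in the self-test section below is
   an example of the latter.  A minimal sketch of another buffer-backed reader,
   assuming only the interface declared above (hypothetical, for illustration):

     class vector_reader : public abstract_memory_reader
     {
     public:
       explicit vector_reader (std::vector<gdb_byte> bytes)
	 : m_bytes (std::move (bytes))
       {}

       bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
       {
	 if (memaddr + len > m_bytes.size ())
	   return false;
	 memcpy (buf, m_bytes.data () + memaddr, len);
	 return true;
       }

     private:
       std::vector<gdb_byte> m_bytes;
     };  */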
13289
13290 /* Extracts an arm/thumb/thumb2 insn of INSN_SIZE bytes, and returns 0 on
13291 success and a positive value on failure. */
13292
13293 static int
13294 extract_arm_insn (abstract_memory_reader& reader,
13295 insn_decode_record *insn_record, uint32_t insn_size)
13296 {
13297 gdb_byte buf[insn_size];
13298
13299 memset (&buf[0], 0, insn_size);
13300
13301 if (!reader.read (insn_record->this_addr, buf, insn_size))
13302 return 1;
13303 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13304 insn_size,
13305 gdbarch_byte_order_for_code (insn_record->gdbarch));
13306 return 0;
13307 }
13308
13309 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13310
13311 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and
13312 dispatch it to the matching record handler. */
13313
13314 static int
13315 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13316 record_type_t record_type, uint32_t insn_size)
13317 {
13318
13319 /* (Starting from numerical 0), bits 25, 26, 27 decode the type of arm
13320 instruction. */
13321 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13322 {
13323 arm_record_data_proc_misc_ld_str, /* 000. */
13324 arm_record_data_proc_imm, /* 001. */
13325 arm_record_ld_st_imm_offset, /* 010. */
13326 arm_record_ld_st_reg_offset, /* 011. */
13327 arm_record_ld_st_multiple, /* 100. */
13328 arm_record_b_bl, /* 101. */
13329 arm_record_asimd_vfp_coproc, /* 110. */
13330 arm_record_coproc_data_proc /* 111. */
13331 };
13332
13333 /* (Starting from numerical 0), bits 13, 14, 15 decode the type of thumb
13334 instruction. */
13335 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13336 {
13337 thumb_record_shift_add_sub, /* 000. */
13338 thumb_record_add_sub_cmp_mov, /* 001. */
13339 thumb_record_ld_st_reg_offset, /* 010. */
13340 thumb_record_ld_st_imm_offset, /* 011. */
13341 thumb_record_ld_st_stack, /* 100. */
13342 thumb_record_misc, /* 101. */
13343 thumb_record_ldm_stm_swi, /* 110. */
13344 thumb_record_branch /* 111. */
13345 };
13346
13347 int ret = 0; /* Return value: -1 on failure, 0 on success. */
13348 uint32_t insn_id = 0;
13349
13350 if (extract_arm_insn (reader, arm_record, insn_size))
13351 {
13352 if (record_debug)
13353 {
13354 printf_unfiltered (_("Process record: error reading memory at "
13355 "addr %s len = %d.\n"),
13356 paddress (arm_record->gdbarch,
13357 arm_record->this_addr), insn_size);
13358 }
13359 return -1;
13360 }
13361 else if (ARM_RECORD == record_type)
13362 {
13363 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13364 insn_id = bits (arm_record->arm_insn, 25, 27);
13365
13366 if (arm_record->cond == 0xf)
13367 ret = arm_record_extension_space (arm_record);
13368 else
13369 {
13370 /* If this insn has fallen into extension space
13371 then we need not decode it anymore. */
13372 ret = arm_handle_insn[insn_id] (arm_record);
13373 }
13374 if (ret != ARM_RECORD_SUCCESS)
13375 {
13376 arm_record_unsupported_insn (arm_record);
13377 ret = -1;
13378 }
13379 }
13380 else if (THUMB_RECORD == record_type)
13381 {
13382 /* Thumb does not have condition codes, so mark the condition as -1. */
13383 arm_record->cond = -1;
13384 insn_id = bits (arm_record->arm_insn, 13, 15);
13385 ret = thumb_handle_insn[insn_id] (arm_record);
13386 if (ret != ARM_RECORD_SUCCESS)
13387 {
13388 arm_record_unsupported_insn (arm_record);
13389 ret = -1;
13390 }
13391 }
13392 else if (THUMB2_RECORD == record_type)
13393 {
13394 /* Thumb does not have condition codes, so mark the condition as -1. */
13395 arm_record->cond = -1;
13396
13397 /* Swap the first half of the 32-bit thumb instruction with the second half. */
13398 arm_record->arm_insn
13399 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13400
13401 ret = thumb2_record_decode_insn_handler (arm_record);
13402
13403 if (ret != ARM_RECORD_SUCCESS)
13404 {
13405 arm_record_unsupported_insn (arm_record);
13406 ret = -1;
13407 }
13408 }
13409 else
13410 {
13411 /* RECORD_TYPE must be one of the cases handled above. */
13412 gdb_assert_not_reached ("not a valid instruction, could not decode");
13413 }
13414
13415 return ret;
13416 }
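
/* Worked example for decode_insn (illustrative only): the ARM instruction
   push {r4, r5, r6, r7, r8, r9, sl, fp, lr} used by the prologue self test
   below assembles to 0xe92d4ff0.  Its condition field, bits [31:28], is 0xe
   (not 0xf), and insn_id = bits [27:25] = 0b100, so the instruction is
   dispatched to arm_handle_insn[4], i.e. arm_record_ld_st_multiple.  */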
13417
13418 #if GDB_SELF_TEST
13419 namespace selftests {
13420
13421 /* Provide both 16-bit and 32-bit thumb instructions. */
13422
13423 class instruction_reader_thumb : public abstract_memory_reader
13424 {
13425 public:
13426 template<size_t SIZE>
13427 instruction_reader_thumb (enum bfd_endian endian,
13428 const uint16_t (&insns)[SIZE])
13429 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13430 {}
13431
13432 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13433 {
13434 SELF_CHECK (len == 4 || len == 2);
13435 SELF_CHECK (memaddr % 2 == 0);
13436 SELF_CHECK ((memaddr / 2) < m_insns_size);
13437
13438 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13439 if (len == 4)
13440 {
13441 store_unsigned_integer (&buf[2], 2, m_endian,
13442 m_insns[memaddr / 2 + 1]);
13443 }
13444 return true;
13445 }
13446
13447 private:
13448 enum bfd_endian m_endian;
13449 const uint16_t *m_insns;
13450 size_t m_insns_size;
13451 };
13452
13453 static void
13454 arm_record_test (void)
13455 {
13456 struct gdbarch_info info;
13457 info.bfd_arch_info = bfd_scan_arch ("arm");
13458
13459 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13460
13461 SELF_CHECK (gdbarch != NULL);
13462
13463 /* 16-bit Thumb instructions. */
13464 {
13465 insn_decode_record arm_record;
13466
13467 memset (&arm_record, 0, sizeof (insn_decode_record));
13468 arm_record.gdbarch = gdbarch;
13469
13470 static const uint16_t insns[] = {
13471 /* db b2 uxtb r3, r3 */
13472 0xb2db,
13473 /* cd 58 ldr r5, [r1, r3] */
13474 0x58cd,
13475 };
13476
13477 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13478 instruction_reader_thumb reader (endian, insns);
13479 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13480 THUMB_INSN_SIZE_BYTES);
13481
13482 SELF_CHECK (ret == 0);
13483 SELF_CHECK (arm_record.mem_rec_count == 0);
13484 SELF_CHECK (arm_record.reg_rec_count == 1);
13485 SELF_CHECK (arm_record.arm_regs[0] == 3);
13486
13487 arm_record.this_addr += 2;
13488 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13489 THUMB_INSN_SIZE_BYTES);
13490
13491 SELF_CHECK (ret == 0);
13492 SELF_CHECK (arm_record.mem_rec_count == 0);
13493 SELF_CHECK (arm_record.reg_rec_count == 1);
13494 SELF_CHECK (arm_record.arm_regs[0] == 5);
13495 }
13496
13497 /* 32-bit Thumb-2 instructions. */
13498 {
13499 insn_decode_record arm_record;
13500
13501 memset (&arm_record, 0, sizeof (insn_decode_record));
13502 arm_record.gdbarch = gdbarch;
13503
13504 static const uint16_t insns[] = {
13505 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13506 0xee1d, 0x7f70,
13507 };
13508
13509 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13510 instruction_reader_thumb reader (endian, insns);
13511 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13512 THUMB2_INSN_SIZE_BYTES);
13513
13514 SELF_CHECK (ret == 0);
13515 SELF_CHECK (arm_record.mem_rec_count == 0);
13516 SELF_CHECK (arm_record.reg_rec_count == 1);
13517 SELF_CHECK (arm_record.arm_regs[0] == 7);
13518 }
13519 }
13520
13521 /* Instruction reader from manually cooked instruction sequences. */
13522
13523 class test_arm_instruction_reader : public arm_instruction_reader
13524 {
13525 public:
13526 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
13527 : m_insns (insns)
13528 {}
13529
13530 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
13531 {
13532 SELF_CHECK (memaddr % 4 == 0);
13533 SELF_CHECK (memaddr / 4 < m_insns.size ());
13534
13535 return m_insns[memaddr / 4];
13536 }
13537
13538 private:
13539 const gdb::array_view<const uint32_t> m_insns;
13540 };
13541
13542 static void
13543 arm_analyze_prologue_test ()
13544 {
13545 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
13546 {
13547 struct gdbarch_info info;
13548 info.byte_order = endianness;
13549 info.byte_order_for_code = endianness;
13550 info.bfd_arch_info = bfd_scan_arch ("arm");
13551
13552 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13553
13554 SELF_CHECK (gdbarch != NULL);
13555
13556 /* The "sub" instruction contains an immediate value rotate count of 0,
13557 which resulted in a 32-bit shift of a 32-bit value, caught by
13558 UBSan. */
13559 const uint32_t insns[] = {
13560 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13561 0xe1a05000, /* mov r5, r0 */
13562 0xe5903020, /* ldr r3, [r0, #32] */
13563 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13564 };
13565
13566 test_arm_instruction_reader mem_reader (insns);
13567 arm_prologue_cache cache;
13568 cache.saved_regs = trad_frame_alloc_saved_regs (gdbarch);
13569
13570 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
13571 }
13572 }
13573
13574 } // namespace selftests
13575 #endif /* GDB_SELF_TEST */
13576
13577 /* Cleans up local record registers and memory allocations. */
13578
13579 static void
13580 deallocate_reg_mem (insn_decode_record *record)
13581 {
13582 xfree (record->arm_regs);
13583 xfree (record->arm_mems);
13584 }
13585
13586
13587 /* Parse the current instruction and record the values of the registers and
13588 memory that will be changed by the current instruction to "record_arch_list".
13589 Return -1 if something is wrong. */
13590
13591 int
13592 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13593 CORE_ADDR insn_addr)
13594 {
13595
13596 uint32_t no_of_rec = 0;
13597 int ret = 0; /* Return value: -1 on record failure, 0 on success. */
13598 ULONGEST t_bit = 0, insn_id = 0;
13599
13600 ULONGEST u_regval = 0;
13601
13602 insn_decode_record arm_record;
13603
13604 memset (&arm_record, 0, sizeof (insn_decode_record));
13605 arm_record.regcache = regcache;
13606 arm_record.this_addr = insn_addr;
13607 arm_record.gdbarch = gdbarch;
13608
13609
13610 if (record_debug > 1)
13611 {
13612 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13613 "addr = %s\n",
13614 paddress (gdbarch, arm_record.this_addr));
13615 }
13616
13617 instruction_reader reader;
13618 if (extract_arm_insn (reader, &arm_record, 2))
13619 {
13620 if (record_debug)
13621 {
13622 printf_unfiltered (_("Process record: error reading memory at "
13623 "addr %s len = %d.\n"),
13624 paddress (arm_record.gdbarch,
13625 arm_record.this_addr), 2);
13626 }
13627 return -1;
13628 }
13629
13630 /* Check whether the insn is a thumb or an arm one. */
13631
13632 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13633 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13634
13635
13636 if (!(u_regval & t_bit))
13637 {
13638 /* We are decoding arm insn. */
13639 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13640 }
13641 else
13642 {
13643 insn_id = bits (arm_record.arm_insn, 11, 15);
13644 /* Is it a thumb2 insn? */
13645 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13646 {
13647 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13648 THUMB2_INSN_SIZE_BYTES);
13649 }
13650 else
13651 {
13652 /* We are decoding thumb insn. */
13653 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13654 THUMB_INSN_SIZE_BYTES);
13655 }
13656 }
13657
13658 if (0 == ret)
13659 {
13660 /* Record registers. */
13661 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13662 if (arm_record.arm_regs)
13663 {
13664 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13665 {
13666 if (record_full_arch_list_add_reg
13667 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13668 ret = -1;
13669 }
13670 }
13671 /* Record memories. */
13672 if (arm_record.arm_mems)
13673 {
13674 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13675 {
13676 if (record_full_arch_list_add_mem
13677 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13678 arm_record.arm_mems[no_of_rec].len))
13679 ret = -1;
13680 }
13681 }
13682
13683 if (record_full_arch_list_add_end ())
13684 ret = -1;
13685 }
13686
13687
13688 deallocate_reg_mem (&arm_record);
13689
13690 return ret;
13691 }
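
/* arm_process_record is the process-record entry point for this
   architecture.  A minimal sketch of how a gdbarch init function is expected
   to hook it up (for illustration; the actual call is made from
   arm_gdbarch_init elsewhere in this file):

     set_gdbarch_process_record (gdbarch, arm_process_record);

   Once installed, "record full" uses it to log, before each instruction is
   executed, the registers and memory that instruction may change.  */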
13692
13693 /* See arm-tdep.h. */
13694
13695 const target_desc *
13696 arm_read_description (arm_fp_type fp_type)
13697 {
13698 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13699
13700 if (tdesc == nullptr)
13701 {
13702 tdesc = arm_create_target_description (fp_type);
13703 tdesc_arm_list[fp_type] = tdesc;
13704 }
13705
13706 return tdesc;
13707 }
13708
13709 /* See arm-tdep.h. */
13710
13711 const target_desc *
13712 arm_read_mprofile_description (arm_m_profile_type m_type)
13713 {
13714 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13715
13716 if (tdesc == nullptr)
13717 {
13718 tdesc = arm_create_mprofile_target_description (m_type);
13719 tdesc_arm_mprofile_list[m_type] = tdesc;
13720 }
13721
13722 return tdesc;
13723 }