1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "producer.h"
64
65 #if GDB_SELF_TEST
66 #include "gdbsupport/selftest.h"
67 #endif
68
69 static bool arm_debug;
70
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as a Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
74
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
83
84 struct arm_mapping_symbol
85 {
86 CORE_ADDR value;
87 char type;
88
89 bool operator< (const arm_mapping_symbol &other) const
90 { return this->value < other.value; }
91 };
92
93 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
94
95 struct arm_per_bfd
96 {
97 explicit arm_per_bfd (size_t num_sections)
98 : section_maps (new arm_mapping_symbol_vec[num_sections]),
99 section_maps_sorted (new bool[num_sections] ())
100 {}
101
102 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
103
104 /* Information about mapping symbols ($a, $d, $t) in the objfile.
105
106 The format is an array of vectors of arm_mapping_symbols: there is one
107 vector for each section of the objfile (the array is indexed by BFD
108 section index).
109
110 For each section, the vector of arm_mapping_symbol is sorted by
111 symbol value (address). */
112 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
113
114 /* For each corresponding element of section_maps above, whether that
115 vector is sorted. */
116 std::unique_ptr<bool[]> section_maps_sorted;
117 };
118
119 /* Per-bfd data used for mapping symbols. */
120 static bfd_key<arm_per_bfd> arm_bfd_data_key;
121
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element *setarmcmdlist = NULL;
124 static struct cmd_list_element *showarmcmdlist = NULL;
125
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings[] =
129 {
130 "auto",
131 "softfpa",
132 "fpa",
133 "softvfp",
134 "vfp",
135 NULL
136 };
137
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
140 static const char *current_fp_model = "auto";
141
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings[] =
144 {
145 "auto",
146 "APCS",
147 "AAPCS",
148 NULL
149 };
150
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
153 static const char *arm_abi_string = "auto";
154
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings[] =
157 {
158 "auto",
159 "arm",
160 "thumb",
161 NULL
162 };
163
164 static const char *arm_fallback_mode_string = "auto";
165 static const char *arm_force_mode_string = "auto";
166
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
171 static const struct
172 {
173 const char *name;
174 int regnum;
175 } arm_register_aliases[] = {
176 /* Basic register numbers. */
177 { "r0", 0 },
178 { "r1", 1 },
179 { "r2", 2 },
180 { "r3", 3 },
181 { "r4", 4 },
182 { "r5", 5 },
183 { "r6", 6 },
184 { "r7", 7 },
185 { "r8", 8 },
186 { "r9", 9 },
187 { "r10", 10 },
188 { "r11", 11 },
189 { "r12", 12 },
190 { "r13", 13 },
191 { "r14", 14 },
192 { "r15", 15 },
193 /* Synonyms (argument and variable registers). */
194 { "a1", 0 },
195 { "a2", 1 },
196 { "a3", 2 },
197 { "a4", 3 },
198 { "v1", 4 },
199 { "v2", 5 },
200 { "v3", 6 },
201 { "v4", 7 },
202 { "v5", 8 },
203 { "v6", 9 },
204 { "v7", 10 },
205 { "v8", 11 },
206 /* Other platform-specific names for r9. */
207 { "sb", 9 },
208 { "tr", 9 },
209 /* Special names. */
210 { "ip", 12 },
211 { "lr", 14 },
212 /* Names used by GCC (not listed in the ARM EABI). */
213 { "sl", 10 },
214 /* A special name from the older ATPCS. */
215 { "wr", 7 },
216 };
217
218 static const char *const arm_register_names[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
226
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options;
229
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles;
232
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style;
235
236 /* All possible arm target descriptors. */
237 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
238 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
239
240 /* This is used to keep the bfd arch_info in sync with the disassembly
241 style. */
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element *);
244 static void show_disassembly_style_sfunc (struct ui_file *, int,
245 struct cmd_list_element *,
246 const char *);
247
248 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
249 readable_regcache *regcache,
250 int regnum, gdb_byte *buf);
251 static void arm_neon_quad_write (struct gdbarch *gdbarch,
252 struct regcache *regcache,
253 int regnum, const gdb_byte *buf);
254
255 static CORE_ADDR
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
257
258
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
261 arm_get_next_pcs_read_memory_unsigned_integer,
262 arm_get_next_pcs_syscall_next_pc,
263 arm_get_next_pcs_addr_bits_remove,
264 arm_get_next_pcs_is_thumb,
265 NULL,
266 };
267
268 struct arm_prologue_cache
269 {
270 /* The stack pointer at the time this frame was created; i.e. the
271 caller's stack pointer when this function was called. It is used
272 to identify this frame. */
273 CORE_ADDR prev_sp;
274
275 /* The frame base for this frame is just prev_sp - frame size.
276 FRAMESIZE is the distance from the frame pointer to the
277 initial stack pointer. */
278
279 int framesize;
280
281 /* The register used to hold the frame pointer for this frame. */
282 int framereg;
283
284 /* Saved register offsets. */
285 struct trad_frame_saved_reg *saved_regs;
286 };
287
288 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
289 CORE_ADDR prologue_start,
290 CORE_ADDR prologue_end,
291 struct arm_prologue_cache *cache);
292
293 /* Architecture version for displaced stepping. This affects the behaviour of
294 certain instructions, and really should not be hard-wired. */
295
296 #define DISPLACED_STEPPING_ARCH_VERSION 5
297
298 /* See arm-tdep.h. */
299
300 bool arm_apcs_32 = true;
301
302 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
303
304 int
305 arm_psr_thumb_bit (struct gdbarch *gdbarch)
306 {
307 if (gdbarch_tdep (gdbarch)->is_m)
308 return XPSR_T;
309 else
310 return CPSR_T;
311 }
312
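/* As a point of reference (values taken from the CPSR_T and XPSR_T
   definitions in arm-tdep.h): CPSR_T is bit 5 of the CPSR on A/R profiles,
   while XPSR_T is bit 24 of the xPSR on M profiles.  So, for illustration,
   a CPSR value of 0x60000030 read from a running target has the Thumb bit
   set, whereas 0x60000010 does not.  */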
313 /* Determine if the processor is currently executing in Thumb mode. */
314
315 int
316 arm_is_thumb (struct regcache *regcache)
317 {
318 ULONGEST cpsr;
319 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
320
321 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
322
323 return (cpsr & t_bit) != 0;
324 }
325
326 /* Determine if FRAME is executing in Thumb mode. */
327
328 int
329 arm_frame_is_thumb (struct frame_info *frame)
330 {
331 CORE_ADDR cpsr;
332 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
333
334 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
335 directly (from a signal frame or dummy frame) or by interpreting
336 the saved LR (from a prologue or DWARF frame). So consult it and
337 trust the unwinders. */
338 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
339
340 return (cpsr & t_bit) != 0;
341 }
342
343 /* Search for the mapping symbol covering MEMADDR. If one is found,
344 return its type. Otherwise, return 0. If START is non-NULL,
345 set *START to the location of the mapping symbol. */
346
347 static char
348 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
349 {
350 struct obj_section *sec;
351
352 /* If there are mapping symbols, consult them. */
353 sec = find_pc_section (memaddr);
354 if (sec != NULL)
355 {
356 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
357 if (data != NULL)
358 {
359 unsigned int section_idx = sec->the_bfd_section->index;
360 arm_mapping_symbol_vec &map
361 = data->section_maps[section_idx];
362
363 /* Sort the vector on first use. */
364 if (!data->section_maps_sorted[section_idx])
365 {
366 std::sort (map.begin (), map.end ());
367 data->section_maps_sorted[section_idx] = true;
368 }
369
370 struct arm_mapping_symbol map_key
371 = { memaddr - obj_section_addr (sec), 0 };
372 arm_mapping_symbol_vec::const_iterator it
373 = std::lower_bound (map.begin (), map.end (), map_key);
374
375 /* std::lower_bound finds the earliest ordered insertion
376 point. If the symbol at this position starts at this exact
377 address, we use that; otherwise, the preceding
378 mapping symbol covers this address. */
379 if (it < map.end ())
380 {
381 if (it->value == map_key.value)
382 {
383 if (start)
384 *start = it->value + obj_section_addr (sec);
385 return it->type;
386 }
387 }
388
389 if (it > map.begin ())
390 {
391 arm_mapping_symbol_vec::const_iterator prev_it
392 = it - 1;
393
394 if (start)
395 *start = prev_it->value + obj_section_addr (sec);
396 return prev_it->type;
397 }
398 }
399 }
400
401 return 0;
402 }
403
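/* An illustrative example (the offsets below are made up): if a section's
   mapping symbols are { $a at 0x0, $t at 0x40, $d at 0x80 }, then a query
   for section offset 0x44 falls between the $t and $d entries, so the
   lower_bound search above settles on the preceding $t entry and the
   address is reported as Thumb code; a query for exactly 0x40 matches the
   $t entry itself.  */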
404 /* Determine if the program counter specified in MEMADDR is in a Thumb
405 function. This function should be called for addresses unrelated to
406 any executing frame; otherwise, prefer arm_frame_is_thumb. */
407
408 int
409 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
410 {
411 struct bound_minimal_symbol sym;
412 char type;
413 arm_displaced_step_closure *dsc
414 = ((arm_displaced_step_closure * )
415 get_displaced_step_closure_by_addr (memaddr));
416
417 /* If checking the mode of a displaced instruction in the copy area, the
418 mode should be determined by the instruction at the original address. */
419 if (dsc)
420 {
421 if (debug_displaced)
422 fprintf_unfiltered (gdb_stdlog,
423 "displaced: check mode of %.8lx instead of %.8lx\n",
424 (unsigned long) dsc->insn_addr,
425 (unsigned long) memaddr);
426 memaddr = dsc->insn_addr;
427 }
428
429 /* If bit 0 of the address is set, assume this is a Thumb address. */
430 if (IS_THUMB_ADDR (memaddr))
431 return 1;
432
433 /* If the user wants to override the symbol table, let them. */
434 if (strcmp (arm_force_mode_string, "arm") == 0)
435 return 0;
436 if (strcmp (arm_force_mode_string, "thumb") == 0)
437 return 1;
438
439 /* ARM v6-M and v7-M are always in Thumb mode. */
440 if (gdbarch_tdep (gdbarch)->is_m)
441 return 1;
442
443 /* If there are mapping symbols, consult them. */
444 type = arm_find_mapping_symbol (memaddr, NULL);
445 if (type)
446 return type == 't';
447
448 /* Thumb functions have a "special" bit set in minimal symbols. */
449 sym = lookup_minimal_symbol_by_pc (memaddr);
450 if (sym.minsym)
451 return (MSYMBOL_IS_SPECIAL (sym.minsym));
452
453 /* If the user wants to override the fallback mode, let them. */
454 if (strcmp (arm_fallback_mode_string, "arm") == 0)
455 return 0;
456 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
457 return 1;
458
459 /* If we couldn't find any symbol, but we're talking to a running
460 target, then trust the current value of $cpsr. This lets
461 "display/i $pc" always show the correct mode (though if there is
462 a symbol table we will not reach here, so it still may not be
463 displayed in the mode in which it will be executed). */
464 if (target_has_registers ())
465 return arm_frame_is_thumb (get_current_frame ());
466
467 /* Otherwise we're out of luck; we assume ARM. */
468 return 0;
469 }
470
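/* A minimal usage sketch (the caller below is hypothetical, not code from
   this file):

     CORE_ADDR addr = ...;
     if (arm_pc_is_thumb (gdbarch, addr))
       addr = MAKE_THUMB_ADDR (addr);

   where MAKE_THUMB_ADDR is assumed to be the counterpart of the
   UNMAKE_THUMB_ADDR macro used in arm_addr_bits_remove below.  */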
471 /* Determine if the address specified equals any of these magic return
472 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
473 architectures.
474
475 From ARMv6-M Reference Manual B1.5.8
476 Table B1-5 Exception return behavior
477
478 EXC_RETURN Return To Return Stack
479 0xFFFFFFF1 Handler mode Main
480 0xFFFFFFF9 Thread mode Main
481 0xFFFFFFFD Thread mode Process
482
483 From ARMv7-M Reference Manual B1.5.8
484 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
485
486 EXC_RETURN Return To Return Stack
487 0xFFFFFFF1 Handler mode Main
488 0xFFFFFFF9 Thread mode Main
489 0xFFFFFFFD Thread mode Process
490
491 Table B1-9 EXC_RETURN definition of exception return behavior, with
492 FP
493
494 EXC_RETURN Return To Return Stack Frame Type
495 0xFFFFFFE1 Handler mode Main Extended
496 0xFFFFFFE9 Thread mode Main Extended
497 0xFFFFFFED Thread mode Process Extended
498 0xFFFFFFF1 Handler mode Main Basic
499 0xFFFFFFF9 Thread mode Main Basic
500 0xFFFFFFFD Thread mode Process Basic
501
502 For more details see "B1.5.8 Exception return behavior"
503 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
504
505 The ARMv8-M Architecture Technical Reference also adds the following
506 for implementations without the Security Extension:
507
508 EXC_RETURN Condition
509 0xFFFFFFB0 Return to Handler mode.
510 0xFFFFFFB8 Return to Thread mode using the main stack.
511 0xFFFFFFBC Return to Thread mode using the process stack. */
512
513 static int
514 arm_m_addr_is_magic (CORE_ADDR addr)
515 {
516 switch (addr)
517 {
518 /* Values from ARMv8-M Architecture Technical Reference. */
519 case 0xffffffb0:
520 case 0xffffffb8:
521 case 0xffffffbc:
522 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
523 the exception return behavior. */
524 case 0xffffffe1:
525 case 0xffffffe9:
526 case 0xffffffed:
527 case 0xfffffff1:
528 case 0xfffffff9:
529 case 0xfffffffd:
530 /* Address is magic. */
531 return 1;
532
533 default:
534 /* Address is not magic. */
535 return 0;
536 }
537 }
538
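/* For example, if an M-profile frame's saved LR reads 0xfffffff9, that
   value is not a real code address but one of the magic EXC_RETURN values
   listed above (return to Thread mode using the main stack), so callers
   such as arm_addr_bits_remove below must pass it through unmodified.  */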
539 /* Remove useless bits from addresses in a running program. */
540 static CORE_ADDR
541 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
542 {
543 /* On M-profile devices, do not strip the low bit from EXC_RETURN
544 (the magic exception return address). */
545 if (gdbarch_tdep (gdbarch)->is_m
546 && arm_m_addr_is_magic (val))
547 return val;
548
549 if (arm_apcs_32)
550 return UNMAKE_THUMB_ADDR (val);
551 else
552 return (val & 0x03fffffc);
553 }
554
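/* Worked example (addresses chosen for illustration): with arm_apcs_32 set
   (the default), a Thumb-style address such as 0x00008001 is cleaned to
   0x00008000 by UNMAKE_THUMB_ADDR, while on a 26-bit APCS target the same
   value would instead be masked with 0x03fffffc.  On M-profile targets the
   magic value 0xfffffff9 is returned unchanged, as described above.  */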
555 /* Return 1 if PC is the start of a compiler helper function which
556 can be safely ignored during prologue skipping. IS_THUMB is true
557 if the function is known to be a Thumb function due to the way it
558 is being called. */
559 static int
560 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
561 {
562 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
563 struct bound_minimal_symbol msym;
564
565 msym = lookup_minimal_symbol_by_pc (pc);
566 if (msym.minsym != NULL
567 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
568 && msym.minsym->linkage_name () != NULL)
569 {
570 const char *name = msym.minsym->linkage_name ();
571
572 /* The GNU linker's Thumb call stub to foo is named
573 __foo_from_thumb. */
574 if (strstr (name, "_from_thumb") != NULL)
575 name += 2;
576
577 /* On soft-float targets, __truncdfsf2 is called to convert promoted
578 arguments to their argument types in non-prototyped
579 functions. */
580 if (startswith (name, "__truncdfsf2"))
581 return 1;
582 if (startswith (name, "__aeabi_d2f"))
583 return 1;
584
585 /* Internal functions related to thread-local storage. */
586 if (startswith (name, "__tls_get_addr"))
587 return 1;
588 if (startswith (name, "__aeabi_read_tp"))
589 return 1;
590 }
591 else
592 {
593 /* If we run against a stripped glibc, we may be unable to identify
594 special functions by name. Check for one important case,
595 __aeabi_read_tp, by comparing the *code* against the default
596 implementation (this is hand-written ARM assembler in glibc). */
597
598 if (!is_thumb
599 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
600 == 0xe3e00a0f /* mov r0, #0xffff0fff */
601 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
602 == 0xe240f01f) /* sub pc, r0, #31 */
603 return 1;
604 }
605
606 return 0;
607 }
608
609 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1
610 is the first 16 bits of the instruction, and INSN2 is the second 16 bits
611 of the instruction. */
612 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
613 ((bits ((insn1), 0, 3) << 12) \
614 | (bits ((insn1), 10, 10) << 11) \
615 | (bits ((insn2), 12, 14) << 8) \
616 | bits ((insn2), 0, 7))
617
618 /* Extract the immediate from a movw/movt instruction of encoding A. INSN
619 is the 32-bit instruction. */
620 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
621 ((bits ((insn), 16, 19) << 12) \
622 | bits ((insn), 0, 11))
623
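/* Worked example for encoding A (the instruction is chosen for
   illustration): for 0xe3012345, i.e. "movw r2, #0x1345", bits 16-19 give
   0x1 and bits 0-11 give 0x345, so EXTRACT_MOVW_MOVT_IMM_A yields
   (0x1 << 12) | 0x345 == 0x1345.  */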
624 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
625
626 static unsigned int
627 thumb_expand_immediate (unsigned int imm)
628 {
629 unsigned int count = imm >> 7;
630
631 if (count < 8)
632 switch (count / 2)
633 {
634 case 0:
635 return imm & 0xff;
636 case 1:
637 return (imm & 0xff) | ((imm & 0xff) << 16);
638 case 2:
639 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
640 case 3:
641 return (imm & 0xff) | ((imm & 0xff) << 8)
642 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
643 }
644
645 return (0x80 | (imm & 0x7f)) << (32 - count);
646 }
647
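/* Worked examples (immediates chosen for illustration): for imm == 0x0ab,
   count is 1 and count / 2 is 0, so the result is plain 0x000000ab; for
   imm == 0x2ab, count / 2 is 2, so the byte is replicated into bits 8-15
   and 24-31, giving 0xab00ab00; for imm == 0x4f0, count is 9 (>= 8), so the
   result is the rotated constant (0x80 | 0x70) << (32 - 9) == 0x78000000.  */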
648 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
649 epilogue, 0 otherwise. */
650
651 static int
652 thumb_instruction_restores_sp (unsigned short insn)
653 {
654 return (insn == 0x46bd /* mov sp, r7 */
655 || (insn & 0xff80) == 0xb000 /* add sp, imm */
656 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
657 }
658
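/* For instance (encodings given for illustration), 0xb008 ("add sp, #32")
   matches the (insn & 0xff80) == 0xb000 test, and 0xbc10 ("pop {r4}")
   matches the (insn & 0xfe00) == 0xbc00 test, so either one marks the start
   of an epilogue for the scanners below.  */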
659 /* Analyze a Thumb prologue, looking for a recognizable stack frame
660 and frame pointer. Scan until we encounter a store that could
661 clobber the stack frame unexpectedly, or an unknown instruction.
662 Return the last address which is definitely safe to skip for an
663 initial breakpoint. */
664
665 static CORE_ADDR
666 thumb_analyze_prologue (struct gdbarch *gdbarch,
667 CORE_ADDR start, CORE_ADDR limit,
668 struct arm_prologue_cache *cache)
669 {
670 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
671 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
672 int i;
673 pv_t regs[16];
674 CORE_ADDR offset;
675 CORE_ADDR unrecognized_pc = 0;
676
677 for (i = 0; i < 16; i++)
678 regs[i] = pv_register (i, 0);
679 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
680
681 while (start < limit)
682 {
683 unsigned short insn;
684
685 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
686
687 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
688 {
689 int regno;
690 int mask;
691
692 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
693 break;
694
695 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
696 whether to save LR (R14). */
697 mask = (insn & 0xff) | ((insn & 0x100) << 6);
698
699 /* Calculate offsets of saved R0-R7 and LR. */
700 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
701 if (mask & (1 << regno))
702 {
703 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
704 -4);
705 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
706 }
707 }
708 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
709 {
710 offset = (insn & 0x7f) << 2; /* get scaled offset */
711 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
712 -offset);
713 }
714 else if (thumb_instruction_restores_sp (insn))
715 {
716 /* Don't scan past the epilogue. */
717 break;
718 }
719 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
720 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
721 (insn & 0xff) << 2);
722 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
723 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
724 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
725 bits (insn, 6, 8));
726 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
727 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
728 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
729 bits (insn, 0, 7));
730 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
731 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
732 && pv_is_constant (regs[bits (insn, 3, 5)]))
733 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
734 regs[bits (insn, 6, 8)]);
735 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
736 && pv_is_constant (regs[bits (insn, 3, 6)]))
737 {
738 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
739 int rm = bits (insn, 3, 6);
740 regs[rd] = pv_add (regs[rd], regs[rm]);
741 }
742 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
743 {
744 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
745 int src_reg = (insn & 0x78) >> 3;
746 regs[dst_reg] = regs[src_reg];
747 }
748 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
749 {
750 /* Handle stores to the stack. Normally pushes are used,
751 but with GCC -mtpcs-frame, there may be other stores
752 in the prologue to create the frame. */
753 int regno = (insn >> 8) & 0x7;
754 pv_t addr;
755
756 offset = (insn & 0xff) << 2;
757 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
758
759 if (stack.store_would_trash (addr))
760 break;
761
762 stack.store (addr, 4, regs[regno]);
763 }
764 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
765 {
766 int rd = bits (insn, 0, 2);
767 int rn = bits (insn, 3, 5);
768 pv_t addr;
769
770 offset = bits (insn, 6, 10) << 2;
771 addr = pv_add_constant (regs[rn], offset);
772
773 if (stack.store_would_trash (addr))
774 break;
775
776 stack.store (addr, 4, regs[rd]);
777 }
778 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
779 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
780 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
781 /* Ignore stores of argument registers to the stack. */
782 ;
783 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
784 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
785 /* Ignore block loads from the stack, potentially copying
786 parameters from memory. */
787 ;
788 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
789 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
790 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
791 /* Similarly ignore single loads from the stack. */
792 ;
793 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
794 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
795 /* Skip register copies, i.e. saves to another register
796 instead of the stack. */
797 ;
798 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
799 /* Recognize constant loads; even with small stacks these are necessary
800 on Thumb. */
801 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
802 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
803 {
804 /* Constant pool loads, for the same reason. */
805 unsigned int constant;
806 CORE_ADDR loc;
807
808 loc = start + 4 + bits (insn, 0, 7) * 4;
809 constant = read_memory_unsigned_integer (loc, 4, byte_order);
810 regs[bits (insn, 8, 10)] = pv_constant (constant);
811 }
812 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
813 {
814 unsigned short inst2;
815
816 inst2 = read_code_unsigned_integer (start + 2, 2,
817 byte_order_for_code);
818
819 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
820 {
821 /* BL, BLX. Allow some special function calls when
822 skipping the prologue; GCC generates these before
823 storing arguments to the stack. */
824 CORE_ADDR nextpc;
825 int j1, j2, imm1, imm2;
826
827 imm1 = sbits (insn, 0, 10);
828 imm2 = bits (inst2, 0, 10);
829 j1 = bit (inst2, 13);
830 j2 = bit (inst2, 11);
831
832 offset = ((imm1 << 12) + (imm2 << 1));
833 offset ^= ((!j2) << 22) | ((!j1) << 23);
834
835 nextpc = start + 4 + offset;
836 /* For BLX make sure to clear the low bits. */
837 if (bit (inst2, 12) == 0)
838 nextpc = nextpc & 0xfffffffc;
839
840 if (!skip_prologue_function (gdbarch, nextpc,
841 bit (inst2, 12) != 0))
842 break;
843 }
844
845 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
846 { registers } */
847 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 {
849 pv_t addr = regs[bits (insn, 0, 3)];
850 int regno;
851
852 if (stack.store_would_trash (addr))
853 break;
854
855 /* Calculate offsets of saved registers. */
856 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
857 if (inst2 & (1 << regno))
858 {
859 addr = pv_add_constant (addr, -4);
860 stack.store (addr, 4, regs[regno]);
861 }
862
863 if (insn & 0x0020)
864 regs[bits (insn, 0, 3)] = addr;
865 }
866
867 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
868 [Rn, #+/-imm]{!} */
869 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
870 {
871 int regno1 = bits (inst2, 12, 15);
872 int regno2 = bits (inst2, 8, 11);
873 pv_t addr = regs[bits (insn, 0, 3)];
874
875 offset = inst2 & 0xff;
876 if (insn & 0x0080)
877 addr = pv_add_constant (addr, offset);
878 else
879 addr = pv_add_constant (addr, -offset);
880
881 if (stack.store_would_trash (addr))
882 break;
883
884 stack.store (addr, 4, regs[regno1]);
885 stack.store (pv_add_constant (addr, 4),
886 4, regs[regno2]);
887
888 if (insn & 0x0020)
889 regs[bits (insn, 0, 3)] = addr;
890 }
891
892 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
893 && (inst2 & 0x0c00) == 0x0c00
894 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
895 {
896 int regno = bits (inst2, 12, 15);
897 pv_t addr = regs[bits (insn, 0, 3)];
898
899 offset = inst2 & 0xff;
900 if (inst2 & 0x0200)
901 addr = pv_add_constant (addr, offset);
902 else
903 addr = pv_add_constant (addr, -offset);
904
905 if (stack.store_would_trash (addr))
906 break;
907
908 stack.store (addr, 4, regs[regno]);
909
910 if (inst2 & 0x0100)
911 regs[bits (insn, 0, 3)] = addr;
912 }
913
914 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 {
917 int regno = bits (inst2, 12, 15);
918 pv_t addr;
919
920 offset = inst2 & 0xfff;
921 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
922
923 if (stack.store_would_trash (addr))
924 break;
925
926 stack.store (addr, 4, regs[regno]);
927 }
928
929 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
930 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 /* Ignore stores of argument registers to the stack. */
932 ;
933
934 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
935 && (inst2 & 0x0d00) == 0x0c00
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Ignore stores of argument registers to the stack. */
938 ;
939
940 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
941 { registers } */
942 && (inst2 & 0x8000) == 0x0000
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Ignore block loads from the stack, potentially copying
945 parameters from memory. */
946 ;
947
948 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
949 [Rn, #+/-imm] */
950 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
951 /* Similarly ignore dual loads from the stack. */
952 ;
953
954 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
955 && (inst2 & 0x0d00) == 0x0c00
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
957 /* Similarly ignore single loads from the stack. */
958 ;
959
960 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
961 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
962 /* Similarly ignore single loads from the stack. */
963 ;
964
965 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 thumb_expand_immediate (imm));
975 }
976
977 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
979 {
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
983
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
986 }
987
988 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
989 && (inst2 & 0x8000) == 0x0000)
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_add_constant (regs[bits (insn, 0, 3)],
997 - (CORE_ADDR) thumb_expand_immediate (imm));
998 }
999
1000 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1001 && (inst2 & 0x8000) == 0x0000)
1002 {
1003 unsigned int imm = ((bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1006
1007 regs[bits (inst2, 8, 11)]
1008 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1009 }
1010
1011 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1012 {
1013 unsigned int imm = ((bits (insn, 10, 10) << 11)
1014 | (bits (inst2, 12, 14) << 8)
1015 | bits (inst2, 0, 7));
1016
1017 regs[bits (inst2, 8, 11)]
1018 = pv_constant (thumb_expand_immediate (imm));
1019 }
1020
1021 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1022 {
1023 unsigned int imm
1024 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1025
1026 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1027 }
1028
1029 else if (insn == 0xea5f /* mov.w Rd,Rm */
1030 && (inst2 & 0xf0f0) == 0)
1031 {
1032 int dst_reg = (inst2 & 0x0f00) >> 8;
1033 int src_reg = inst2 & 0xf;
1034 regs[dst_reg] = regs[src_reg];
1035 }
1036
1037 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1038 {
1039 /* Constant pool loads. */
1040 unsigned int constant;
1041 CORE_ADDR loc;
1042
1043 offset = bits (inst2, 0, 11);
1044 if (insn & 0x0080)
1045 loc = start + 4 + offset;
1046 else
1047 loc = start + 4 - offset;
1048
1049 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1050 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1051 }
1052
1053 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1054 {
1055 /* Constant pool loads. */
1056 unsigned int constant;
1057 CORE_ADDR loc;
1058
1059 offset = bits (inst2, 0, 7) << 2;
1060 if (insn & 0x0080)
1061 loc = start + 4 + offset;
1062 else
1063 loc = start + 4 - offset;
1064
1065 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1066 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1067
1068 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1069 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1070 }
1071
1072 else if (thumb2_instruction_changes_pc (insn, inst2))
1073 {
1074 /* Don't scan past anything that might change control flow. */
1075 break;
1076 }
1077 else
1078 {
1079 /* The optimizer might shove anything into the prologue,
1080 so we just skip what we don't recognize. */
1081 unrecognized_pc = start;
1082 }
1083
1084 start += 2;
1085 }
1086 else if (thumb_instruction_changes_pc (insn))
1087 {
1088 /* Don't scan past anything that might change control flow. */
1089 break;
1090 }
1091 else
1092 {
1093 /* The optimizer might shove anything into the prologue,
1094 so we just skip what we don't recognize. */
1095 unrecognized_pc = start;
1096 }
1097
1098 start += 2;
1099 }
1100
1101 if (arm_debug)
1102 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1103 paddress (gdbarch, start));
1104
1105 if (unrecognized_pc == 0)
1106 unrecognized_pc = start;
1107
1108 if (cache == NULL)
1109 return unrecognized_pc;
1110
1111 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1112 {
1113 /* Frame pointer is fp. Frame size is constant. */
1114 cache->framereg = ARM_FP_REGNUM;
1115 cache->framesize = -regs[ARM_FP_REGNUM].k;
1116 }
1117 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1118 {
1119 /* Frame pointer is r7. Frame size is constant. */
1120 cache->framereg = THUMB_FP_REGNUM;
1121 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1122 }
1123 else
1124 {
1125 /* Try the stack pointer... this is a bit desperate. */
1126 cache->framereg = ARM_SP_REGNUM;
1127 cache->framesize = -regs[ARM_SP_REGNUM].k;
1128 }
1129
1130 for (i = 0; i < 16; i++)
1131 if (stack.find_reg (gdbarch, i, &offset))
1132 cache->saved_regs[i].addr = offset;
1133
1134 return unrecognized_pc;
1135 }
1136
1137
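/* As a worked illustration of the scan above, consider a typical
   unoptimized Thumb-2 prologue (a sketch; encodings omitted):

       push  {r7, lr}     @ saves lr at SP-4 and r7 at SP-8
       sub   sp, #16      @ SP is now the original SP - 24
       add   r7, sp, #0   @ r7 becomes the frame pointer

   Afterwards regs[THUMB_FP_REGNUM] is the original SP minus 24, so the
   cache records framereg == THUMB_FP_REGNUM and framesize == 24, while the
   saved r7 and LR are found by stack.find_reg at offsets -8 and -4; the
   caller (arm_make_prologue_cache) later adds prev_sp to turn those offsets
   into absolute addresses.  */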
1138 /* Try to analyze the instructions starting from PC, which load the symbol
1139 __stack_chk_guard. Return the address of the instruction after loading this
1140 symbol, set the destination register number in *DESTREG, and set the size of
1141 the instructions used to load the symbol in *OFFSET. Return 0 if the
1142 instructions are not recognized. */
1143
1144 static CORE_ADDR
1145 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1146 unsigned int *destreg, int *offset)
1147 {
1148 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1149 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1150 unsigned int low, high, address;
1151
1152 address = 0;
1153 if (is_thumb)
1154 {
1155 unsigned short insn1
1156 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1157
1158 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1159 {
1160 *destreg = bits (insn1, 8, 10);
1161 *offset = 2;
1162 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1163 address = read_memory_unsigned_integer (address, 4,
1164 byte_order_for_code);
1165 }
1166 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1167 {
1168 unsigned short insn2
1169 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1170
1171 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1172
1173 insn1
1174 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1175 insn2
1176 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1177
1178 /* movt Rd, #const */
1179 if ((insn1 & 0xfbc0) == 0xf2c0)
1180 {
1181 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1182 *destreg = bits (insn2, 8, 11);
1183 *offset = 8;
1184 address = (high << 16 | low);
1185 }
1186 }
1187 }
1188 else
1189 {
1190 unsigned int insn
1191 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1192
1193 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1194 {
1195 address = bits (insn, 0, 11) + pc + 8;
1196 address = read_memory_unsigned_integer (address, 4,
1197 byte_order_for_code);
1198
1199 *destreg = bits (insn, 12, 15);
1200 *offset = 4;
1201 }
1202 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1203 {
1204 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1205
1206 insn
1207 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1208
1209 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1210 {
1211 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1212 *destreg = bits (insn, 12, 15);
1213 *offset = 8;
1214 address = (high << 16 | low);
1215 }
1216 }
1217 }
1218
1219 return address;
1220 }
1221
1222 /* Try to skip a sequence of instructions used for stack protector. If PC
1223 points to the first instruction of this sequence, return the address of
1224 the first instruction after this sequence; otherwise, return the original PC.
1225
1226 On arm, this sequence of instructions is mainly composed of three steps:
1227 Step 1: load symbol __stack_chk_guard,
1228 Step 2: load from address of __stack_chk_guard,
1229 Step 3: store it to somewhere else.
1230
1231 Usually, the instructions in step 2 and step 3 are the same across ARM
1232 architectures. Step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1233 step 3 is also the single instruction 'str Rx, [r7, #immd]'. However, the
1234 instructions in step 1 vary across ARM architectures. On ARMv7,
1235 they are,
1236
1237 movw Rn, #:lower16:__stack_chk_guard
1238 movt Rn, #:upper16:__stack_chk_guard
1239
1240 On ARMv5t, it is,
1241
1242 ldr Rn, .Label
1243 ....
1244 .Label:
1245 .word __stack_chk_guard
1246
1247 Since ldr/str are very common instructions, we can't use them as the
1248 'fingerprint' or 'signature' of a stack protector sequence. Here we choose
1249 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1250 not stripped, as the 'fingerprint' of a stack protector code sequence. */
1251
1252 static CORE_ADDR
1253 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1254 {
1255 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1256 unsigned int basereg;
1257 struct bound_minimal_symbol stack_chk_guard;
1258 int offset;
1259 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1260 CORE_ADDR addr;
1261
1262 /* Try to parse the instructions in Step 1. */
1263 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1264 &basereg, &offset);
1265 if (!addr)
1266 return pc;
1267
1268 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1269 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1270 Otherwise, this sequence cannot be for the stack protector. */
1271 if (stack_chk_guard.minsym == NULL
1272 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1273 return pc;
1274
1275 if (is_thumb)
1276 {
1277 unsigned int destreg;
1278 unsigned short insn
1279 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1280
1281 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1282 if ((insn & 0xf800) != 0x6800)
1283 return pc;
1284 if (bits (insn, 3, 5) != basereg)
1285 return pc;
1286 destreg = bits (insn, 0, 2);
1287
1288 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1289 byte_order_for_code);
1290 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1291 if ((insn & 0xf800) != 0x6000)
1292 return pc;
1293 if (destreg != bits (insn, 0, 2))
1294 return pc;
1295 }
1296 else
1297 {
1298 unsigned int destreg;
1299 unsigned int insn
1300 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1301
1302 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1303 if ((insn & 0x0e500000) != 0x04100000)
1304 return pc;
1305 if (bits (insn, 16, 19) != basereg)
1306 return pc;
1307 destreg = bits (insn, 12, 15);
1308 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1309 insn = read_code_unsigned_integer (pc + offset + 4,
1310 4, byte_order_for_code);
1311 if ((insn & 0x0e500000) != 0x04000000)
1312 return pc;
1313 if (bits (insn, 12, 15) != destreg)
1314 return pc;
1315 }
1316 /* The total size of the two ldr/str instructions is 4 on Thumb-2, while it
1317 is 8 on ARM. */
1318 if (is_thumb)
1319 return pc + offset + 4;
1320 else
1321 return pc + offset + 8;
1322 }
1323
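/* A worked illustration (registers and offsets chosen arbitrarily): for a
   Thumb-2 prologue containing

       movw  r3, #:lower16:__stack_chk_guard
       movt  r3, #:upper16:__stack_chk_guard   @ step 1, *offset == 8
       ldr   r3, [r3, #0]                      @ step 2
       str   r3, [r7, #12]                     @ step 3

   arm_skip_stack_protector returns PC + 8 + 4, i.e. the address of the
   instruction following the str.  */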
1324 /* Advance the PC across any function entry prologue instructions to
1325 reach some "real" code.
1326
1327 The APCS (ARM Procedure Call Standard) defines the following
1328 prologue:
1329
1330 mov ip, sp
1331 [stmfd sp!, {a1,a2,a3,a4}]
1332 stmfd sp!, {...,fp,ip,lr,pc}
1333 [stfe f7, [sp, #-12]!]
1334 [stfe f6, [sp, #-12]!]
1335 [stfe f5, [sp, #-12]!]
1336 [stfe f4, [sp, #-12]!]
1337 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1338
1339 static CORE_ADDR
1340 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1341 {
1342 CORE_ADDR func_addr, limit_pc;
1343
1344 /* See if we can determine the end of the prologue via the symbol table.
1345 If so, then return either PC, or the PC after the prologue, whichever
1346 is greater. */
1347 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1348 {
1349 CORE_ADDR post_prologue_pc
1350 = skip_prologue_using_sal (gdbarch, func_addr);
1351 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1352
1353 if (post_prologue_pc)
1354 post_prologue_pc
1355 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1356
1357
1358 /* GCC always emits a line note before the prologue and another
1359 one after, even if the two are at the same address or on the
1360 same line. Take advantage of this so that we do not need to
1361 know every instruction that might appear in the prologue. We
1362 will have producer information for most binaries; if it is
1363 missing (e.g. for -gstabs), assume the GNU tools. */
1364 if (post_prologue_pc
1365 && (cust == NULL
1366 || COMPUNIT_PRODUCER (cust) == NULL
1367 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1368 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1369 return post_prologue_pc;
1370
1371 if (post_prologue_pc != 0)
1372 {
1373 CORE_ADDR analyzed_limit;
1374
1375 /* For non-GCC compilers, make sure the entire line is an
1376 acceptable prologue; GDB will round this function's
1377 return value up to the end of the following line so we
1378 cannot skip just part of a line (and we do not want to).
1379
1380 RealView does not treat the prologue specially, but does
1381 associate prologue code with the opening brace; so this
1382 lets us skip the first line if we think it is the opening
1383 brace. */
1384 if (arm_pc_is_thumb (gdbarch, func_addr))
1385 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1386 post_prologue_pc, NULL);
1387 else
1388 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1389 post_prologue_pc, NULL);
1390
1391 if (analyzed_limit != post_prologue_pc)
1392 return func_addr;
1393
1394 return post_prologue_pc;
1395 }
1396 }
1397
1398 /* Can't determine prologue from the symbol table, need to examine
1399 instructions. */
1400
1401 /* Find an upper limit on the function prologue using the debug
1402 information. If the debug information could not be used to provide
1403 that bound, then use an arbitrarily large number as the upper bound. */
1404 /* Like arm_scan_prologue, stop no later than pc + 64. */
1405 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1406 if (limit_pc == 0)
1407 limit_pc = pc + 64; /* Magic. */
1408
1409
1410 /* Check if this is Thumb code. */
1411 if (arm_pc_is_thumb (gdbarch, pc))
1412 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1413 else
1414 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1415 }
1416
1417 /* *INDENT-OFF* */
1418 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1419 This function decodes a Thumb function prologue to determine:
1420 1) the size of the stack frame
1421 2) which registers are saved on it
1422 3) the offsets of saved regs
1423 4) the offset from the stack pointer to the frame pointer
1424
1425 A typical Thumb function prologue would create this stack frame
1426 (offsets relative to FP)
1427 old SP -> 24 stack parameters
1428 20 LR
1429 16 R7
1430 R7 -> 0 local variables (16 bytes)
1431 SP -> -12 additional stack space (12 bytes)
1432 The frame size would thus be 36 bytes, and the frame offset would be
1433 12 bytes. The frame register is R7.
1434
1435 The comments for thumb_skip_prolog() describe the algorithm we use
1436 to detect the end of the prologue. */
1437 /* *INDENT-ON* */
1438
1439 static void
1440 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1441 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1442 {
1443 CORE_ADDR prologue_start;
1444 CORE_ADDR prologue_end;
1445
1446 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1447 &prologue_end))
1448 {
1449 /* See comment in arm_scan_prologue for an explanation of
1450 this heuristic. */
1451 if (prologue_end > prologue_start + 64)
1452 {
1453 prologue_end = prologue_start + 64;
1454 }
1455 }
1456 else
1457 /* We're in the boondocks: we have no idea where the start of the
1458 function is. */
1459 return;
1460
1461 prologue_end = std::min (prologue_end, prev_pc);
1462
1463 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1464 }
1465
1466 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1467 otherwise. */
1468
1469 static int
1470 arm_instruction_restores_sp (unsigned int insn)
1471 {
1472 if (bits (insn, 28, 31) != INST_NV)
1473 {
1474 if ((insn & 0x0df0f000) == 0x0080d000
1475 /* ADD SP (register or immediate). */
1476 || (insn & 0x0df0f000) == 0x0040d000
1477 /* SUB SP (register or immediate). */
1478 || (insn & 0x0ffffff0) == 0x01a0d000
1479 /* MOV SP. */
1480 || (insn & 0x0fff0000) == 0x08bd0000
1481 /* POP (LDMIA). */
1482 || (insn & 0x0fff0000) == 0x049d0000)
1483 /* POP of a single register. */
1484 return 1;
1485 }
1486
1487 return 0;
1488 }
1489
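/* For example (encodings given for illustration), 0xe8bd8010
   ("pop {r4, pc}", i.e. ldmia sp!, {r4, pc}) matches the
   (insn & 0x0fff0000) == 0x08bd0000 test above, and 0xe28dd018
   ("add sp, sp, #24") matches the ADD SP pattern, so both stop the scan in
   arm_analyze_prologue below.  */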
1490 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1491 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1492 fill it in. Return the first address not recognized as a prologue
1493 instruction.
1494
1495 We recognize all the instructions typically found in ARM prologues,
1496 plus harmless instructions which can be skipped (either for analysis
1497 purposes, or a more restrictive set that can be skipped when finding
1498 the end of the prologue). */
1499
1500 static CORE_ADDR
1501 arm_analyze_prologue (struct gdbarch *gdbarch,
1502 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1503 struct arm_prologue_cache *cache)
1504 {
1505 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1506 int regno;
1507 CORE_ADDR offset, current_pc;
1508 pv_t regs[ARM_FPS_REGNUM];
1509 CORE_ADDR unrecognized_pc = 0;
1510
1511 /* Search the prologue looking for instructions that set up the
1512 frame pointer, adjust the stack pointer, and save registers.
1513
1514 Be careful, however, and if it doesn't look like a prologue,
1515 don't try to scan it. If, for instance, a frameless function
1516 begins with stmfd sp!, then we will tell ourselves there is
1517 a frame, which will confuse stack traceback, as well as "finish"
1518 and other operations that rely on a knowledge of the stack
1519 traceback. */
1520
1521 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1522 regs[regno] = pv_register (regno, 0);
1523 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1524
1525 for (current_pc = prologue_start;
1526 current_pc < prologue_end;
1527 current_pc += 4)
1528 {
1529 unsigned int insn
1530 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1531
1532 if (insn == 0xe1a0c00d) /* mov ip, sp */
1533 {
1534 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1535 continue;
1536 }
1537 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1538 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1539 {
1540 unsigned imm = insn & 0xff; /* immediate value */
1541 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1542 int rd = bits (insn, 12, 15);
1543 imm = (imm >> rot) | (imm << (32 - rot));
1544 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1545 continue;
1546 }
1547 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1548 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1549 {
1550 unsigned imm = insn & 0xff; /* immediate value */
1551 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1552 int rd = bits (insn, 12, 15);
1553 imm = (imm >> rot) | (imm << (32 - rot));
1554 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1555 continue;
1556 }
1557 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1558 [sp, #-4]! */
1559 {
1560 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1561 break;
1562 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1563 stack.store (regs[ARM_SP_REGNUM], 4,
1564 regs[bits (insn, 12, 15)]);
1565 continue;
1566 }
1567 else if ((insn & 0xffff0000) == 0xe92d0000)
1568 /* stmfd sp!, {..., fp, ip, lr, pc}
1569 or
1570 stmfd sp!, {a1, a2, a3, a4} */
1571 {
1572 int mask = insn & 0xffff;
1573
1574 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1575 break;
1576
1577 /* Calculate offsets of saved registers. */
1578 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1579 if (mask & (1 << regno))
1580 {
1581 regs[ARM_SP_REGNUM]
1582 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1583 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1584 }
1585 }
1586 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1587 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1588 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1589 {
1590 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 continue;
1592 }
1593 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1594 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1595 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1596 {
1597 /* No need to add this to saved_regs -- it's just an arg reg. */
1598 continue;
1599 }
1600 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1601 { registers } */
1602 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1603 {
1604 /* No need to add this to saved_regs -- it's just arg regs. */
1605 continue;
1606 }
1607 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1608 {
1609 unsigned imm = insn & 0xff; /* immediate value */
1610 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1611 imm = (imm >> rot) | (imm << (32 - rot));
1612 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1613 }
1614 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1615 {
1616 unsigned imm = insn & 0xff; /* immediate value */
1617 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1618 imm = (imm >> rot) | (imm << (32 - rot));
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1620 }
1621 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1622 [sp, -#c]! */
1623 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1624 {
1625 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1626 break;
1627
1628 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1629 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1630 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1631 }
1632 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1633 [sp!] */
1634 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1635 {
1636 int n_saved_fp_regs;
1637 unsigned int fp_start_reg, fp_bound_reg;
1638
1639 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1640 break;
1641
1642 if ((insn & 0x800) == 0x800) /* N0 is set */
1643 {
1644 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1645 n_saved_fp_regs = 3;
1646 else
1647 n_saved_fp_regs = 1;
1648 }
1649 else
1650 {
1651 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1652 n_saved_fp_regs = 2;
1653 else
1654 n_saved_fp_regs = 4;
1655 }
1656
1657 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1658 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1659 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1660 {
1661 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1662 stack.store (regs[ARM_SP_REGNUM], 12,
1663 regs[fp_start_reg]);
1664 }
1665 }
1666 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1667 {
1668 /* Allow some special function calls when skipping the
1669 prologue; GCC generates these before storing arguments to
1670 the stack. */
1671 CORE_ADDR dest = BranchDest (current_pc, insn);
1672
1673 if (skip_prologue_function (gdbarch, dest, 0))
1674 continue;
1675 else
1676 break;
1677 }
1678 else if ((insn & 0xf0000000) != 0xe0000000)
1679 break; /* Condition not true, exit early. */
1680 else if (arm_instruction_changes_pc (insn))
1681 /* Don't scan past anything that might change control flow. */
1682 break;
1683 else if (arm_instruction_restores_sp (insn))
1684 {
1685 /* Don't scan past the epilogue. */
1686 break;
1687 }
1688 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1689 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1690 /* Ignore block loads from the stack, potentially copying
1691 parameters from memory. */
1692 continue;
1693 else if ((insn & 0xfc500000) == 0xe4100000
1694 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1695 /* Similarly ignore single loads from the stack. */
1696 continue;
1697 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1698 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1699 register instead of the stack. */
1700 continue;
1701 else
1702 {
1703 /* The optimizer might shove anything into the prologue. If we
1704 are building up the cache (cache != NULL) from scanning the
1705 prologue, we just skip what we don't recognize and scan further
1706 to make the cache as complete as possible. However, if we are
1707 just skipping the prologue, we stop immediately on the first
1708 unrecognized instruction. */
1709 unrecognized_pc = current_pc;
1710 if (cache != NULL)
1711 continue;
1712 else
1713 break;
1714 }
1715 }
1716
1717 if (unrecognized_pc == 0)
1718 unrecognized_pc = current_pc;
1719
1720 if (cache)
1721 {
1722 int framereg, framesize;
1723
1724 /* The frame size is just the distance from the frame register
1725 to the original stack pointer. */
1726 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1727 {
1728 /* Frame pointer is fp. */
1729 framereg = ARM_FP_REGNUM;
1730 framesize = -regs[ARM_FP_REGNUM].k;
1731 }
1732 else
1733 {
1734 /* Try the stack pointer... this is a bit desperate. */
1735 framereg = ARM_SP_REGNUM;
1736 framesize = -regs[ARM_SP_REGNUM].k;
1737 }
1738
1739 cache->framereg = framereg;
1740 cache->framesize = framesize;
1741
1742 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1743 if (stack.find_reg (gdbarch, regno, &offset))
1744 cache->saved_regs[regno].addr = offset;
1745 }
1746
1747 if (arm_debug)
1748 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1749 paddress (gdbarch, unrecognized_pc));
1750
1751 return unrecognized_pc;
1752 }
1753
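/* As a concrete illustration of the scan above, the classic APCS prologue
   quoted before arm_skip_prologue:

       mov   ip, sp
       stmfd sp!, {fp, ip, lr, pc}
       sub   fp, ip, #4
       sub   sp, sp, #16

   leaves regs[ARM_FP_REGNUM] equal to the original SP minus 4, so the cache
   records framereg == ARM_FP_REGNUM and framesize == 4, while the saved FP,
   IP, LR and PC are found by stack.find_reg at offsets -16, -12, -8 and -4
   from the original stack pointer.  */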
1754 static void
1755 arm_scan_prologue (struct frame_info *this_frame,
1756 struct arm_prologue_cache *cache)
1757 {
1758 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1759 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1760 CORE_ADDR prologue_start, prologue_end;
1761 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1762 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1763
1764 /* Assume there is no frame until proven otherwise. */
1765 cache->framereg = ARM_SP_REGNUM;
1766 cache->framesize = 0;
1767
1768 /* Check for Thumb prologue. */
1769 if (arm_frame_is_thumb (this_frame))
1770 {
1771 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1772 return;
1773 }
1774
1775 /* Find the function prologue. If we can't find the function in
1776 the symbol table, peek in the stack frame to find the PC. */
1777 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1778 &prologue_end))
1779 {
1780 /* One way to find the end of the prologue (which works well
1781 for unoptimized code) is to do the following:
1782
1783 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1784
1785 if (sal.line == 0)
1786 prologue_end = prev_pc;
1787 else if (sal.end < prologue_end)
1788 prologue_end = sal.end;
1789
1790 This mechanism is very accurate so long as the optimizer
1791 doesn't move any instructions from the function body into the
1792 prologue. If this happens, sal.end will be the last
1793 instruction in the first hunk of prologue code just before
1794 the first instruction that the scheduler has moved from
1795 the body to the prologue.
1796
1797 In order to make sure that we scan all of the prologue
1798 instructions, we use a slightly less accurate mechanism which
1799 may scan more than necessary. To help compensate for this
1800 lack of accuracy, the prologue scanning loop below contains
1801 several clauses which will cause the loop to terminate early if
1802 an implausible prologue instruction is encountered.
1803
1804 The expression
1805
1806 prologue_start + 64
1807
1808 is a suitable endpoint since it accounts for the largest
1809 possible prologue plus up to five instructions inserted by
1810 the scheduler. */
1811
1812 if (prologue_end > prologue_start + 64)
1813 {
1814 prologue_end = prologue_start + 64; /* See above. */
1815 }
1816 }
1817 else
1818 {
1819 /* We have no symbol information. Our only option is to assume this
1820 function has a standard stack frame and the normal frame register.
1821 Then, we can find the value of our frame pointer on entrance to
1822 the callee (or at the present moment if this is the innermost frame).
1823 The value stored there should be the address of the stmfd + 8. */
1824 CORE_ADDR frame_loc;
1825 ULONGEST return_value;
1826
1827 /* AAPCS does not use a frame register, so we can abort here. */
1828 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1829 return;
1830
1831 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1832 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1833 &return_value))
1834 return;
1835 else
1836 {
1837 prologue_start = gdbarch_addr_bits_remove
1838 (gdbarch, return_value) - 8;
1839 prologue_end = prologue_start + 64; /* See above. */
1840 }
1841 }
1842
1843 if (prev_pc < prologue_end)
1844 prologue_end = prev_pc;
1845
1846 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1847 }
1848
1849 static struct arm_prologue_cache *
1850 arm_make_prologue_cache (struct frame_info *this_frame)
1851 {
1852 int reg;
1853 struct arm_prologue_cache *cache;
1854 CORE_ADDR unwound_fp;
1855
1856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1858
1859 arm_scan_prologue (this_frame, cache);
1860
1861 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1862 if (unwound_fp == 0)
1863 return cache;
1864
1865 cache->prev_sp = unwound_fp + cache->framesize;
1866
1867 /* Calculate actual addresses of saved registers using offsets
1868 determined by arm_scan_prologue. */
1869 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1870 if (trad_frame_addr_p (cache->saved_regs, reg))
1871 cache->saved_regs[reg].addr += cache->prev_sp;
1872
1873 return cache;
1874 }
1875
1876 /* Implementation of the stop_reason hook for arm_prologue frames. */
1877
1878 static enum unwind_stop_reason
1879 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1880 void **this_cache)
1881 {
1882 struct arm_prologue_cache *cache;
1883 CORE_ADDR pc;
1884
1885 if (*this_cache == NULL)
1886 *this_cache = arm_make_prologue_cache (this_frame);
1887 cache = (struct arm_prologue_cache *) *this_cache;
1888
1889 /* This is meant to halt the backtrace at "_start". */
1890 pc = get_frame_pc (this_frame);
1891 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1892 return UNWIND_OUTERMOST;
1893
1894 /* If we've hit a wall, stop. */
1895 if (cache->prev_sp == 0)
1896 return UNWIND_OUTERMOST;
1897
1898 return UNWIND_NO_REASON;
1899 }
1900
1901 /* Our frame ID for a normal frame is the current function's starting PC
1902 and the caller's SP when we were called. */
1903
1904 static void
1905 arm_prologue_this_id (struct frame_info *this_frame,
1906 void **this_cache,
1907 struct frame_id *this_id)
1908 {
1909 struct arm_prologue_cache *cache;
1910 struct frame_id id;
1911 CORE_ADDR pc, func;
1912
1913 if (*this_cache == NULL)
1914 *this_cache = arm_make_prologue_cache (this_frame);
1915 cache = (struct arm_prologue_cache *) *this_cache;
1916
1917 /* Use function start address as part of the frame ID. If we cannot
1918 identify the start address (due to missing symbol information),
1919 fall back to just using the current PC. */
1920 pc = get_frame_pc (this_frame);
1921 func = get_frame_func (this_frame);
1922 if (!func)
1923 func = pc;
1924
1925 id = frame_id_build (cache->prev_sp, func);
1926 *this_id = id;
1927 }
1928
1929 static struct value *
1930 arm_prologue_prev_register (struct frame_info *this_frame,
1931 void **this_cache,
1932 int prev_regnum)
1933 {
1934 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1935 struct arm_prologue_cache *cache;
1936
1937 if (*this_cache == NULL)
1938 *this_cache = arm_make_prologue_cache (this_frame);
1939 cache = (struct arm_prologue_cache *) *this_cache;
1940
1941 /* If we are asked to unwind the PC, then we need to return the LR
1942 instead. The prologue may save PC, but it will point into this
1943 frame's prologue, not the next frame's resume location. Also
1944 strip the saved T bit. A valid LR may have the low bit set, but
1945 a valid PC never does. */
1946 if (prev_regnum == ARM_PC_REGNUM)
1947 {
1948 CORE_ADDR lr;
1949
1950 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1951 return frame_unwind_got_constant (this_frame, prev_regnum,
1952 arm_addr_bits_remove (gdbarch, lr));
1953 }
1954
1955 /* SP is generally not saved to the stack, but this frame is
1956 identified by the next frame's stack pointer at the time of the call.
1957 The value was already reconstructed into PREV_SP. */
1958 if (prev_regnum == ARM_SP_REGNUM)
1959 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1960
1961 /* The CPSR may have been changed by the call instruction and by the
1962 called function. The only bit we can reconstruct is the T bit,
1963 by checking the low bit of LR as of the call. This is a reliable
1964 indicator of Thumb-ness except for some ARM v4T pre-interworking
1965 Thumb code, which could get away with a clear low bit as long as
1966 the called function did not use bx. Guess that all other
1967 bits are unchanged; the condition flags are presumably lost,
1968 but the processor status is likely valid. */
1969 if (prev_regnum == ARM_PS_REGNUM)
1970 {
1971 CORE_ADDR lr, cpsr;
1972 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1973
1974 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1975 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1976 if (IS_THUMB_ADDR (lr))
1977 cpsr |= t_bit;
1978 else
1979 cpsr &= ~t_bit;
1980 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1981 }
1982
1983 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1984 prev_regnum);
1985 }
1986
1987 struct frame_unwind arm_prologue_unwind = {
1988 NORMAL_FRAME,
1989 arm_prologue_unwind_stop_reason,
1990 arm_prologue_this_id,
1991 arm_prologue_prev_register,
1992 NULL,
1993 default_frame_sniffer
1994 };
1995
1996 /* Maintain a list of ARM exception table entries per objfile, similar to the
1997 list of mapping symbols. We only cache entries for standard ARM-defined
1998 personality routines; the cache will contain only the frame unwinding
1999 instructions associated with the entry (not the descriptors). */
2000
2001 struct arm_exidx_entry
2002 {
2003 CORE_ADDR addr;
2004 gdb_byte *entry;
2005
2006 bool operator< (const arm_exidx_entry &other) const
2007 {
2008 return addr < other.addr;
2009 }
2010 };
2011
2012 struct arm_exidx_data
2013 {
2014 std::vector<std::vector<arm_exidx_entry>> section_maps;
2015 };
2016
2017 /* Per-BFD key to store exception handling information. */
2018 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2019
2020 static struct obj_section *
2021 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2022 {
2023 struct obj_section *osect;
2024
2025 ALL_OBJFILE_OSECTIONS (objfile, osect)
2026 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2027 {
2028 bfd_vma start, size;
2029 start = bfd_section_vma (osect->the_bfd_section);
2030 size = bfd_section_size (osect->the_bfd_section);
2031
2032 if (start <= vma && vma < start + size)
2033 return osect;
2034 }
2035
2036 return NULL;
2037 }
2038
2039 /* Parse contents of exception table and exception index sections
2040 of OBJFILE, and fill in the exception table entry cache.
2041
2042 For each entry that refers to a standard ARM-defined personality
2043 routine, extract the frame unwinding instructions (from either
2044 the index or the table section). The unwinding instructions
2045 are normalized by:
2046 - extracting them from the rest of the table data
2047 - converting to host endianness
2048 - appending the implicit 0xb0 ("Finish") code
2049
2050 The extracted and normalized instructions are stored for later
2051 retrieval by the arm_find_exidx_entry routine. */
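/* For background (a summary of the EHABI layout, not used directly by
the code below): each 8-byte .ARM.exidx entry is a pair of 32-bit
words.  The first is a prel31 offset to the start of the function it
covers; the second is either the value 1 (EXIDX_CANTUNWIND), an inline
"short form" entry with bit 31 set, or a prel31 offset to a table
entry in .ARM.extab. */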
2052
2053 static void
2054 arm_exidx_new_objfile (struct objfile *objfile)
2055 {
2056 struct arm_exidx_data *data;
2057 asection *exidx, *extab;
2058 bfd_vma exidx_vma = 0, extab_vma = 0;
2059 LONGEST i;
2060
2061 /* If we've already touched this file, do nothing. */
2062 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2063 return;
2064
2065 /* Read contents of exception table and index. */
2066 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2067 gdb::byte_vector exidx_data;
2068 if (exidx)
2069 {
2070 exidx_vma = bfd_section_vma (exidx);
2071 exidx_data.resize (bfd_section_size (exidx));
2072
2073 if (!bfd_get_section_contents (objfile->obfd, exidx,
2074 exidx_data.data (), 0,
2075 exidx_data.size ()))
2076 return;
2077 }
2078
2079 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2080 gdb::byte_vector extab_data;
2081 if (extab)
2082 {
2083 extab_vma = bfd_section_vma (extab);
2084 extab_data.resize (bfd_section_size (extab));
2085
2086 if (!bfd_get_section_contents (objfile->obfd, extab,
2087 extab_data.data (), 0,
2088 extab_data.size ()))
2089 return;
2090 }
2091
2092 /* Allocate exception table data structure. */
2093 data = arm_exidx_data_key.emplace (objfile->obfd);
2094 data->section_maps.resize (objfile->obfd->section_count);
2095
2096 /* Fill in exception table. */
2097 for (i = 0; i < exidx_data.size () / 8; i++)
2098 {
2099 struct arm_exidx_entry new_exidx_entry;
2100 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2101 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2102 exidx_data.data () + i * 8 + 4);
2103 bfd_vma addr = 0, word = 0;
2104 int n_bytes = 0, n_words = 0;
2105 struct obj_section *sec;
2106 gdb_byte *entry = NULL;
2107
2108 /* Extract address of start of function. */
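/* Note: the expression below sign-extends the 31-bit place-relative
(prel31) offset to a signed 32-bit value; the following addition of
the entry's own address (exidx_vma + i * 8) then yields the absolute
address of the function. */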
2109 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2110 idx += exidx_vma + i * 8;
2111
2112 /* Find section containing function and compute section offset. */
2113 sec = arm_obj_section_from_vma (objfile, idx);
2114 if (sec == NULL)
2115 continue;
2116 idx -= bfd_section_vma (sec->the_bfd_section);
2117
2118 /* Determine address of exception table entry. */
2119 if (val == 1)
2120 {
2121 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2122 }
2123 else if ((val & 0xff000000) == 0x80000000)
2124 {
2125 /* Exception table entry embedded in .ARM.exidx
2126 -- must be short form. */
2127 word = val;
2128 n_bytes = 3;
2129 }
2130 else if (!(val & 0x80000000))
2131 {
2132 /* Exception table entry in .ARM.extab. */
2133 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2134 addr += exidx_vma + i * 8 + 4;
2135
2136 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2137 {
2138 word = bfd_h_get_32 (objfile->obfd,
2139 extab_data.data () + addr - extab_vma);
2140 addr += 4;
2141
2142 if ((word & 0xff000000) == 0x80000000)
2143 {
2144 /* Short form. */
2145 n_bytes = 3;
2146 }
2147 else if ((word & 0xff000000) == 0x81000000
2148 || (word & 0xff000000) == 0x82000000)
2149 {
2150 /* Long form. */
2151 n_bytes = 2;
2152 n_words = ((word >> 16) & 0xff);
2153 }
2154 else if (!(word & 0x80000000))
2155 {
2156 bfd_vma pers;
2157 struct obj_section *pers_sec;
2158 int gnu_personality = 0;
2159
2160 /* Custom personality routine. */
2161 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2162 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2163
2164 /* Check whether we've got one of the variants of the
2165 GNU personality routines. */
2166 pers_sec = arm_obj_section_from_vma (objfile, pers);
2167 if (pers_sec)
2168 {
2169 static const char *personality[] =
2170 {
2171 "__gcc_personality_v0",
2172 "__gxx_personality_v0",
2173 "__gcj_personality_v0",
2174 "__gnu_objc_personality_v0",
2175 NULL
2176 };
2177
2178 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2179 int k;
2180
2181 for (k = 0; personality[k]; k++)
2182 if (lookup_minimal_symbol_by_pc_name
2183 (pc, personality[k], objfile))
2184 {
2185 gnu_personality = 1;
2186 break;
2187 }
2188 }
2189
2190 /* If so, the next word contains a word count in the high
2191 byte, followed by the same unwind instructions as the
2192 pre-defined forms. */
2193 if (gnu_personality
2194 && addr + 4 <= extab_vma + extab_data.size ())
2195 {
2196 word = bfd_h_get_32 (objfile->obfd,
2197 (extab_data.data ()
2198 + addr - extab_vma));
2199 addr += 4;
2200 n_bytes = 3;
2201 n_words = ((word >> 24) & 0xff);
2202 }
2203 }
2204 }
2205 }
2206
2207 /* Sanity check address. */
2208 if (n_words)
2209 if (addr < extab_vma
2210 || addr + 4 * n_words > extab_vma + extab_data.size ())
2211 n_words = n_bytes = 0;
2212
2213 /* The unwind instructions reside in WORD (only the N_BYTES least
2214 significant bytes are valid), followed by N_WORDS words in the
2215 extab section starting at ADDR. */
2216 if (n_bytes || n_words)
2217 {
2218 gdb_byte *p = entry
2219 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2220 n_bytes + n_words * 4 + 1);
2221
2222 while (n_bytes--)
2223 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2224
2225 while (n_words--)
2226 {
2227 word = bfd_h_get_32 (objfile->obfd,
2228 extab_data.data () + addr - extab_vma);
2229 addr += 4;
2230
2231 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2232 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2233 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2234 *p++ = (gdb_byte) (word & 0xff);
2235 }
2236
2237 /* Implied "Finish" to terminate the list. */
2238 *p++ = 0xb0;
2239 }
2240
2241 /* Push the entry onto the vector. Entries are guaranteed to
2242 always appear in order of increasing addresses. */
2243 new_exidx_entry.addr = idx;
2244 new_exidx_entry.entry = entry;
2245 data->section_maps[sec->the_bfd_section->index].push_back
2246 (new_exidx_entry);
2247 }
2248 }
2249
2250 /* Search for the exception table entry covering MEMADDR. If one is found,
2251 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2252 set *START to the start of the region covered by this entry. */
2253
2254 static gdb_byte *
2255 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2256 {
2257 struct obj_section *sec;
2258
2259 sec = find_pc_section (memaddr);
2260 if (sec != NULL)
2261 {
2262 struct arm_exidx_data *data;
2263 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2264
2265 data = arm_exidx_data_key.get (sec->objfile->obfd);
2266 if (data != NULL)
2267 {
2268 std::vector<arm_exidx_entry> &map
2269 = data->section_maps[sec->the_bfd_section->index];
2270 if (!map.empty ())
2271 {
2272 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2273
2274 /* std::lower_bound finds the earliest ordered insertion
2275 point. If the following symbol starts at this exact
2276 address, we use that; otherwise, the preceding
2277 exception table entry covers this address. */
2278 if (idx < map.end ())
2279 {
2280 if (idx->addr == map_key.addr)
2281 {
2282 if (start)
2283 *start = idx->addr + obj_section_addr (sec);
2284 return idx->entry;
2285 }
2286 }
2287
2288 if (idx > map.begin ())
2289 {
2290 idx = idx - 1;
2291 if (start)
2292 *start = idx->addr + obj_section_addr (sec);
2293 return idx->entry;
2294 }
2295 }
2296 }
2297 }
2298
2299 return NULL;
2300 }
2301
2302 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2303 instruction list from the ARM exception table entry ENTRY, allocate and
2304 return a prologue cache structure describing how to unwind this frame.
2305
2306 Return NULL if the unwinding instruction list contains a "spare",
2307 "reserved" or "refuse to unwind" instruction as defined in section
2308 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2309 for the ARM Architecture" document. */
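/* As an illustrative example (an assumed frame, not taken from this
code): a prologue of "push {r4, r11, lr}; sub sp, sp, #16" is
typically described by the unwind bytes 0x03 0x84 0x81 0xb0, which the
loop below decodes as: vsp += 3 * 4 + 4 (undoing the sub), pop
{r4, r11, lr} under mask 0x481, then "Finish" (copy LR into PC). */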
2310
2311 static struct arm_prologue_cache *
2312 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2313 {
2314 CORE_ADDR vsp = 0;
2315 int vsp_valid = 0;
2316
2317 struct arm_prologue_cache *cache;
2318 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2319 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2320
2321 for (;;)
2322 {
2323 gdb_byte insn;
2324
2325 /* Whenever we reload SP, we have to retrieve its actual
2326 value in the current frame. */
2327 if (!vsp_valid)
2328 {
2329 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2330 {
2331 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2332 vsp = get_frame_register_unsigned (this_frame, reg);
2333 }
2334 else
2335 {
2336 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2337 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2338 }
2339
2340 vsp_valid = 1;
2341 }
2342
2343 /* Decode next unwind instruction. */
2344 insn = *entry++;
2345
2346 if ((insn & 0xc0) == 0)
2347 {
2348 int offset = insn & 0x3f;
2349 vsp += (offset << 2) + 4;
2350 }
2351 else if ((insn & 0xc0) == 0x40)
2352 {
2353 int offset = insn & 0x3f;
2354 vsp -= (offset << 2) + 4;
2355 }
2356 else if ((insn & 0xf0) == 0x80)
2357 {
2358 int mask = ((insn & 0xf) << 8) | *entry++;
2359 int i;
2360
2361 /* The special case of an all-zero mask identifies
2362 "Refuse to unwind". We return NULL to fall back
2363 to the prologue analyzer. */
2364 if (mask == 0)
2365 return NULL;
2366
2367 /* Pop registers r4..r15 under mask. */
2368 for (i = 0; i < 12; i++)
2369 if (mask & (1 << i))
2370 {
2371 cache->saved_regs[4 + i].addr = vsp;
2372 vsp += 4;
2373 }
2374
2375 /* Special-case popping SP -- we need to reload vsp. */
2376 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2377 vsp_valid = 0;
2378 }
2379 else if ((insn & 0xf0) == 0x90)
2380 {
2381 int reg = insn & 0xf;
2382
2383 /* Reserved cases. */
2384 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2385 return NULL;
2386
2387 /* Set SP from another register and mark VSP for reload. */
2388 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2389 vsp_valid = 0;
2390 }
2391 else if ((insn & 0xf0) == 0xa0)
2392 {
2393 int count = insn & 0x7;
2394 int pop_lr = (insn & 0x8) != 0;
2395 int i;
2396
2397 /* Pop r4..r[4+count]. */
2398 for (i = 0; i <= count; i++)
2399 {
2400 cache->saved_regs[4 + i].addr = vsp;
2401 vsp += 4;
2402 }
2403
2404 /* If indicated by flag, pop LR as well. */
2405 if (pop_lr)
2406 {
2407 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2408 vsp += 4;
2409 }
2410 }
2411 else if (insn == 0xb0)
2412 {
2413 /* We could only have updated PC by popping into it; if so, it
2414 will show up as an address. Otherwise, copy LR into PC. */
2415 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2416 cache->saved_regs[ARM_PC_REGNUM]
2417 = cache->saved_regs[ARM_LR_REGNUM];
2418
2419 /* We're done. */
2420 break;
2421 }
2422 else if (insn == 0xb1)
2423 {
2424 int mask = *entry++;
2425 int i;
2426
2427 /* An all-zero mask or a mask >= 16 is "spare". */
2428 if (mask == 0 || mask >= 16)
2429 return NULL;
2430
2431 /* Pop r0..r3 under mask. */
2432 for (i = 0; i < 4; i++)
2433 if (mask & (1 << i))
2434 {
2435 cache->saved_regs[i].addr = vsp;
2436 vsp += 4;
2437 }
2438 }
2439 else if (insn == 0xb2)
2440 {
2441 ULONGEST offset = 0;
2442 unsigned shift = 0;
2443
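/* The operand is ULEB128-encoded; per the EHABI this opcode means
vsp = vsp + 0x204 + (uleb128 << 2), so e.g. an operand byte of 0x00
adds exactly 0x204 to vsp. */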
2444 do
2445 {
2446 offset |= (*entry & 0x7f) << shift;
2447 shift += 7;
2448 }
2449 while (*entry++ & 0x80);
2450
2451 vsp += 0x204 + (offset << 2);
2452 }
2453 else if (insn == 0xb3)
2454 {
2455 int start = *entry >> 4;
2456 int count = (*entry++) & 0xf;
2457 int i;
2458
2459 /* Only registers D0..D15 are valid here. */
2460 if (start + count >= 16)
2461 return NULL;
2462
2463 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2464 for (i = 0; i <= count; i++)
2465 {
2466 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2467 vsp += 8;
2468 }
2469
2470 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2471 vsp += 4;
2472 }
2473 else if ((insn & 0xf8) == 0xb8)
2474 {
2475 int count = insn & 0x7;
2476 int i;
2477
2478 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2479 for (i = 0; i <= count; i++)
2480 {
2481 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2482 vsp += 8;
2483 }
2484
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2486 vsp += 4;
2487 }
2488 else if (insn == 0xc6)
2489 {
2490 int start = *entry >> 4;
2491 int count = (*entry++) & 0xf;
2492 int i;
2493
2494 /* Only registers WR0..WR15 are valid. */
2495 if (start + count >= 16)
2496 return NULL;
2497
2498 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2499 for (i = 0; i <= count; i++)
2500 {
2501 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2502 vsp += 8;
2503 }
2504 }
2505 else if (insn == 0xc7)
2506 {
2507 int mask = *entry++;
2508 int i;
2509
2510 /* An all-zero mask or a mask >= 16 is "spare". */
2511 if (mask == 0 || mask >= 16)
2512 return NULL;
2513
2514 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2515 for (i = 0; i < 4; i++)
2516 if (mask & (1 << i))
2517 {
2518 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2519 vsp += 4;
2520 }
2521 }
2522 else if ((insn & 0xf8) == 0xc0)
2523 {
2524 int count = insn & 0x7;
2525 int i;
2526
2527 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2528 for (i = 0; i <= count; i++)
2529 {
2530 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2531 vsp += 8;
2532 }
2533 }
2534 else if (insn == 0xc8)
2535 {
2536 int start = *entry >> 4;
2537 int count = (*entry++) & 0xf;
2538 int i;
2539
2540 /* Only registers D0..D31 are valid. */
2541 if (start + count >= 16)
2542 return NULL;
2543
2544 /* Pop VFP double-precision registers
2545 D[16+start]..D[16+start+count]. */
2546 for (i = 0; i <= count; i++)
2547 {
2548 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2549 vsp += 8;
2550 }
2551 }
2552 else if (insn == 0xc9)
2553 {
2554 int start = *entry >> 4;
2555 int count = (*entry++) & 0xf;
2556 int i;
2557
2558 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2559 for (i = 0; i <= count; i++)
2560 {
2561 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2562 vsp += 8;
2563 }
2564 }
2565 else if ((insn & 0xf8) == 0xd0)
2566 {
2567 int count = insn & 0x7;
2568 int i;
2569
2570 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2571 for (i = 0; i <= count; i++)
2572 {
2573 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2574 vsp += 8;
2575 }
2576 }
2577 else
2578 {
2579 /* Everything else is "spare". */
2580 return NULL;
2581 }
2582 }
2583
2584 /* If we restore SP from a register, assume this was the frame register.
2585 Otherwise just fall back to SP as frame register. */
2586 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2587 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2588 else
2589 cache->framereg = ARM_SP_REGNUM;
2590
2591 /* Determine offset to previous frame. */
2592 cache->framesize
2593 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2594
2595 /* We already got the previous SP. */
2596 cache->prev_sp = vsp;
2597
2598 return cache;
2599 }
2600
2601 /* Unwinding via ARM exception table entries. Note that the sniffer
2602 already computes a filled-in prologue cache, which is then used
2603 with the same arm_prologue_this_id and arm_prologue_prev_register
2604 routines also used for prologue-parsing based unwinding. */
2605
2606 static int
2607 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2608 struct frame_info *this_frame,
2609 void **this_prologue_cache)
2610 {
2611 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2612 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2613 CORE_ADDR addr_in_block, exidx_region, func_start;
2614 struct arm_prologue_cache *cache;
2615 gdb_byte *entry;
2616
2617 /* See if we have an ARM exception table entry covering this address. */
2618 addr_in_block = get_frame_address_in_block (this_frame);
2619 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2620 if (!entry)
2621 return 0;
2622
2623 /* The ARM exception table does not describe unwind information
2624 for arbitrary PC values, but is guaranteed to be correct only
2625 at call sites. We have to decide here whether we want to use
2626 ARM exception table information for this frame, or fall back
2627 to using prologue parsing. (Note that if we have DWARF CFI,
2628 this sniffer isn't even called -- CFI is always preferred.)
2629
2630 Before we make this decision, however, we check whether we
2631 actually have *symbol* information for the current frame.
2632 If not, prologue parsing would not work anyway, so we might
2633 as well use the exception table and hope for the best. */
2634 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2635 {
2636 int exc_valid = 0;
2637
2638 /* If the next frame is "normal", we are at a call site in this
2639 frame, so exception information is guaranteed to be valid. */
2640 if (get_next_frame (this_frame)
2641 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2642 exc_valid = 1;
2643
2644 /* We also assume exception information is valid if we're currently
2645 blocked in a system call. The system library is supposed to
2646 ensure this, so that e.g. pthread cancellation works. */
2647 if (arm_frame_is_thumb (this_frame))
2648 {
2649 ULONGEST insn;
2650
2651 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2652 2, byte_order_for_code, &insn)
2653 && (insn & 0xff00) == 0xdf00 /* svc */)
2654 exc_valid = 1;
2655 }
2656 else
2657 {
2658 ULONGEST insn;
2659
2660 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2661 4, byte_order_for_code, &insn)
2662 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2663 exc_valid = 1;
2664 }
2665
2666 /* Bail out if we don't know that exception information is valid. */
2667 if (!exc_valid)
2668 return 0;
2669
2670 /* The ARM exception index does not mark the *end* of the region
2671 covered by the entry, and some functions will not have any entry.
2672 To correctly recognize the end of the covered region, the linker
2673 should have inserted dummy records with a CANTUNWIND marker.
2674
2675 Unfortunately, current versions of GNU ld do not reliably do
2676 this, and thus we may have found an incorrect entry above.
2677 As a (temporary) sanity check, we only use the entry if it
2678 lies *within* the bounds of the function. Note that this check
2679 might reject perfectly valid entries that just happen to cover
2680 multiple functions; therefore this check ought to be removed
2681 once the linker is fixed. */
2682 if (func_start > exidx_region)
2683 return 0;
2684 }
2685
2686 /* Decode the list of unwinding instructions into a prologue cache.
2687 Note that this may fail due to e.g. a "refuse to unwind" code. */
2688 cache = arm_exidx_fill_cache (this_frame, entry);
2689 if (!cache)
2690 return 0;
2691
2692 *this_prologue_cache = cache;
2693 return 1;
2694 }
2695
2696 struct frame_unwind arm_exidx_unwind = {
2697 NORMAL_FRAME,
2698 default_frame_unwind_stop_reason,
2699 arm_prologue_this_id,
2700 arm_prologue_prev_register,
2701 NULL,
2702 arm_exidx_unwind_sniffer
2703 };
2704
2705 static struct arm_prologue_cache *
2706 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2707 {
2708 struct arm_prologue_cache *cache;
2709 int reg;
2710
2711 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2712 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2713
2714 /* Still rely on the offsets calculated from the prologue. */
2715 arm_scan_prologue (this_frame, cache);
2716
2717 /* Since we are in the epilogue, SP has already been restored. */
2718 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2719
2720 /* Calculate actual addresses of saved registers using offsets
2721 determined by arm_scan_prologue. */
2722 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2723 if (trad_frame_addr_p (cache->saved_regs, reg))
2724 cache->saved_regs[reg].addr += cache->prev_sp;
2725
2726 return cache;
2727 }
2728
2729 /* Implementation of function hook 'this_id' in
2730 'struct frame_unwind' for epilogue unwinder. */
2731
2732 static void
2733 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2734 void **this_cache,
2735 struct frame_id *this_id)
2736 {
2737 struct arm_prologue_cache *cache;
2738 CORE_ADDR pc, func;
2739
2740 if (*this_cache == NULL)
2741 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2742 cache = (struct arm_prologue_cache *) *this_cache;
2743
2744 /* Use function start address as part of the frame ID. If we cannot
2745 identify the start address (due to missing symbol information),
2746 fall back to just using the current PC. */
2747 pc = get_frame_pc (this_frame);
2748 func = get_frame_func (this_frame);
2749 if (func == 0)
2750 func = pc;
2751
2752 (*this_id) = frame_id_build (cache->prev_sp, pc);
2753 }
2754
2755 /* Implementation of function hook 'prev_register' in
2756 'struct frame_unwind' for epilogue unwinder. */
2757
2758 static struct value *
2759 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2760 void **this_cache, int regnum)
2761 {
2762 if (*this_cache == NULL)
2763 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2764
2765 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2766 }
2767
2768 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2769 CORE_ADDR pc);
2770 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2771 CORE_ADDR pc);
2772
2773 /* Implementation of function hook 'sniffer' in
2774 'struct frame_unwind' for epilogue unwinder. */
2775
2776 static int
2777 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2778 struct frame_info *this_frame,
2779 void **this_prologue_cache)
2780 {
2781 if (frame_relative_level (this_frame) == 0)
2782 {
2783 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2784 CORE_ADDR pc = get_frame_pc (this_frame);
2785
2786 if (arm_frame_is_thumb (this_frame))
2787 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2788 else
2789 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2790 }
2791 else
2792 return 0;
2793 }
2794
2795 /* Frame unwinder from epilogue. */
2796
2797 static const struct frame_unwind arm_epilogue_frame_unwind =
2798 {
2799 NORMAL_FRAME,
2800 default_frame_unwind_stop_reason,
2801 arm_epilogue_frame_this_id,
2802 arm_epilogue_frame_prev_register,
2803 NULL,
2804 arm_epilogue_frame_sniffer,
2805 };
2806
2807 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2808 trampoline, return the target PC. Otherwise return 0.
2809
2810 void call0a (char c, short s, int i, long l) {}
2811
2812 int main (void)
2813 {
2814 (*pointer_to_call0a) (c, s, i, l);
2815 }
2816
2817 Instead of calling a stub library function _call_via_xx (xx is
2818 the register name), GCC may inline the trampoline in the object
2819 file as below (register r2 has the address of call0a).
2820
2821 .global main
2822 .type main, %function
2823 ...
2824 bl .L1
2825 ...
2826 .size main, .-main
2827
2828 .L1:
2829 bx r2
2830
2831 The trampoline 'bx r2' doesn't belong to main. */
2832
2833 static CORE_ADDR
2834 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2835 {
2836 /* The heuristic for recognizing such a trampoline is that FRAME is
2837 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2838 if (arm_frame_is_thumb (frame))
2839 {
2840 gdb_byte buf[2];
2841
2842 if (target_read_memory (pc, buf, 2) == 0)
2843 {
2844 struct gdbarch *gdbarch = get_frame_arch (frame);
2845 enum bfd_endian byte_order_for_code
2846 = gdbarch_byte_order_for_code (gdbarch);
2847 uint16_t insn
2848 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2849
2850 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2851 {
2852 CORE_ADDR dest
2853 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2854
2855 /* Clear the LSB so that the gdb core sets the step-resume
2856 breakpoint at the right address. */
2857 return UNMAKE_THUMB_ADDR (dest);
2858 }
2859 }
2860 }
2861
2862 return 0;
2863 }
2864
2865 static struct arm_prologue_cache *
2866 arm_make_stub_cache (struct frame_info *this_frame)
2867 {
2868 struct arm_prologue_cache *cache;
2869
2870 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2871 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2872
2873 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2874
2875 return cache;
2876 }
2877
2878 /* Our frame ID for a stub frame is the current SP and LR. */
2879
2880 static void
2881 arm_stub_this_id (struct frame_info *this_frame,
2882 void **this_cache,
2883 struct frame_id *this_id)
2884 {
2885 struct arm_prologue_cache *cache;
2886
2887 if (*this_cache == NULL)
2888 *this_cache = arm_make_stub_cache (this_frame);
2889 cache = (struct arm_prologue_cache *) *this_cache;
2890
2891 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2892 }
2893
2894 static int
2895 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2896 struct frame_info *this_frame,
2897 void **this_prologue_cache)
2898 {
2899 CORE_ADDR addr_in_block;
2900 gdb_byte dummy[4];
2901 CORE_ADDR pc, start_addr;
2902 const char *name;
2903
2904 addr_in_block = get_frame_address_in_block (this_frame);
2905 pc = get_frame_pc (this_frame);
2906 if (in_plt_section (addr_in_block)
2907 /* We also use the stub unwinder if the target memory is unreadable,
2908 to avoid having the prologue unwinder try to read it. */
2909 || target_read_memory (pc, dummy, 4) != 0)
2910 return 1;
2911
2912 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2913 && arm_skip_bx_reg (this_frame, pc) != 0)
2914 return 1;
2915
2916 return 0;
2917 }
2918
2919 struct frame_unwind arm_stub_unwind = {
2920 NORMAL_FRAME,
2921 default_frame_unwind_stop_reason,
2922 arm_stub_this_id,
2923 arm_prologue_prev_register,
2924 NULL,
2925 arm_stub_unwind_sniffer
2926 };
2927
2928 /* Store, into CACHE->saved_regs, the addresses of the saved
2929 registers of the frame described by THIS_FRAME.  CACHE is
2930 returned. */
2931
2932 static struct arm_prologue_cache *
2933 arm_m_exception_cache (struct frame_info *this_frame)
2934 {
2935 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2936 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2937 struct arm_prologue_cache *cache;
2938 CORE_ADDR lr;
2939 CORE_ADDR sp;
2940 CORE_ADDR unwound_sp;
2941 LONGEST xpsr;
2942 uint32_t exc_return;
2943 uint32_t process_stack_used;
2944 uint32_t extended_frame_used;
2945 uint32_t secure_stack_used;
2946
2947 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2948 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2949
2950 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
2951 describes which bits in LR define which stack was used prior to the
2952 exception and whether the FPU was in use (causing an extended stack frame). */
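/* For example (illustrative EXC_RETURN values): an LR of 0xFFFFFFFD
indicates a return to Thread mode using the process stack (PSP) with a
standard frame, while 0xFFFFFFE9 indicates the main stack (MSP) with
an extended FP frame. */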
2953
2954 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
2955 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2956
2957 /* Check EXC_RETURN indicator bits. */
2958 exc_return = (((lr >> 28) & 0xf) == 0xf);
2959
2960 /* Check EXC_RETURN bit SPSEL to see if the Main or Thread (process) stack was used. */
2961 process_stack_used = ((lr & (1 << 2)) != 0);
2962 if (exc_return && process_stack_used)
2963 {
2964 /* Thread (process) stack used.
2965 Potentially this could be some other register defined by the target,
2966 but PSP can be considered a standard name for the "Process Stack
2967 Pointer".  To be fully aware of system registers like MSP and PSP,
2968 these could be added to a separate XML arm-m-system-profile that is
2969 valid for the ARMv6-M and ARMv7-M architectures.  Also, to be able
2970 to debug e.g. a core file off-line, these registers must be defined
2971 by GDB and included in the core file register sets. */
2972
2973 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
2974 if (psp_regnum == -1)
2975 {
2976 /* The thread (process) stack could not be fetched;
2977 give a warning and exit. */
2978
2979 warning (_("no PSP thread stack unwinding supported."));
2980
2981 /* Terminate any further stack unwinding by making this frame refer to itself (prev_sp == sp). */
2982 cache->prev_sp = sp;
2983 return cache;
2984 }
2985 else
2986 {
2987 /* Thread (process) stack used, use PSP as SP. */
2988 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
2989 }
2990 }
2991 else
2992 {
2993 /* Main stack used, use MSP as SP. */
2994 unwound_sp = sp;
2995 }
2996
2997 /* The hardware saves eight 32-bit words, comprising xPSR,
2998 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2999 "B1.5.6 Exception entry behavior" in
3000 "ARMv7-M Architecture Reference Manual". */
3001 cache->saved_regs[0].addr = unwound_sp;
3002 cache->saved_regs[1].addr = unwound_sp + 4;
3003 cache->saved_regs[2].addr = unwound_sp + 8;
3004 cache->saved_regs[3].addr = unwound_sp + 12;
3005 cache->saved_regs[ARM_IP_REGNUM].addr = unwound_sp + 16;
3006 cache->saved_regs[ARM_LR_REGNUM].addr = unwound_sp + 20;
3007 cache->saved_regs[ARM_PC_REGNUM].addr = unwound_sp + 24;
3008 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3009
3010 /* Check EXC_RETURN bit FTYPE to see if an extended stack frame
3011 (FPU regs stored) was used. */
3012 extended_frame_used = ((lr & (1 << 4)) == 0);
3013 if (exc_return && extended_frame_used)
3014 {
3015 int i;
3016 int fpu_regs_stack_offset;
3017
3018 /* This code does not take lazy stacking into account; see "Lazy
3019 context save of FP state" in B1.5.7, and also ARM AN298, supported
3020 by the Cortex-M4F architecture.
3021 To fully handle this, the FPCCR register (Floating-point Context
3022 Control Register) needs to be read out, and its ASPEN and LSPEN
3023 bits checked to set up the lazily stacked FP registers correctly.
3024 This register is located at address 0xE000EF34. */
3025
3026 /* Extended stack frame type used. */
3027 fpu_regs_stack_offset = unwound_sp + 0x20;
3028 for (i = 0; i < 16; i++)
3029 {
3030 cache->saved_regs[ARM_D0_REGNUM + i].addr = fpu_regs_stack_offset;
3031 fpu_regs_stack_offset += 4;
3032 }
3033 cache->saved_regs[ARM_FPSCR_REGNUM].addr = unwound_sp + 0x60;
3034
3035 /* Offset 0x64 is reserved. */
3036 cache->prev_sp = unwound_sp + 0x68;
3037 }
3038 else
3039 {
3040 /* Standard stack frame type used. */
3041 cache->prev_sp = unwound_sp + 0x20;
3042 }
3043
3044 /* Check EXC_RETURN bit S to see if the Secure or Non-secure stack was used. */
3045 secure_stack_used = ((lr & (1 << 6)) != 0);
3046 if (exc_return && secure_stack_used)
3047 {
3048 /* ARMv8-M exception and interrupt handling is not considered here.
3049 In the ARMv8-M architecture, EXC_RETURN bit S also controls whether
3050 the Secure or Non-secure stack was used. To separate Secure and
3051 Non-secure stacks, processors that are based on the ARMv8-M
3052 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3053 In addition, a stack limit feature is provided using stack limit
3054 registers (accessible using the MSR and MRS instructions) at
3055 Privileged level. */
3056 }
3057
3058 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3059 aligner between the top of the 32-byte stack frame and the
3060 previous context's stack pointer. */
3061 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3062 && (xpsr & (1 << 9)) != 0)
3063 cache->prev_sp += 4;
3064
3065 return cache;
3066 }
3067
3068 /* Implementation of function hook 'this_id' in
3069 'struct frame_unwind'. */
3070
3071 static void
3072 arm_m_exception_this_id (struct frame_info *this_frame,
3073 void **this_cache,
3074 struct frame_id *this_id)
3075 {
3076 struct arm_prologue_cache *cache;
3077
3078 if (*this_cache == NULL)
3079 *this_cache = arm_m_exception_cache (this_frame);
3080 cache = (struct arm_prologue_cache *) *this_cache;
3081
3082 /* Our frame ID for a stub frame is the current SP and LR. */
3083 *this_id = frame_id_build (cache->prev_sp,
3084 get_frame_pc (this_frame));
3085 }
3086
3087 /* Implementation of function hook 'prev_register' in
3088 'struct frame_unwind'. */
3089
3090 static struct value *
3091 arm_m_exception_prev_register (struct frame_info *this_frame,
3092 void **this_cache,
3093 int prev_regnum)
3094 {
3095 struct arm_prologue_cache *cache;
3096
3097 if (*this_cache == NULL)
3098 *this_cache = arm_m_exception_cache (this_frame);
3099 cache = (struct arm_prologue_cache *) *this_cache;
3100
3101 /* The value was already reconstructed into PREV_SP. */
3102 if (prev_regnum == ARM_SP_REGNUM)
3103 return frame_unwind_got_constant (this_frame, prev_regnum,
3104 cache->prev_sp);
3105
3106 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3107 prev_regnum);
3108 }
3109
3110 /* Implementation of function hook 'sniffer' in
3111 'struct frame_unwind'. */
3112
3113 static int
3114 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3115 struct frame_info *this_frame,
3116 void **this_prologue_cache)
3117 {
3118 CORE_ADDR this_pc = get_frame_pc (this_frame);
3119
3120 /* No need to check is_m; this sniffer is only registered for
3121 M-profile architectures. */
3122
3123 /* Check if exception frame returns to a magic PC value. */
3124 return arm_m_addr_is_magic (this_pc);
3125 }
3126
3127 /* Frame unwinder for M-profile exceptions. */
3128
3129 struct frame_unwind arm_m_exception_unwind =
3130 {
3131 SIGTRAMP_FRAME,
3132 default_frame_unwind_stop_reason,
3133 arm_m_exception_this_id,
3134 arm_m_exception_prev_register,
3135 NULL,
3136 arm_m_exception_unwind_sniffer
3137 };
3138
3139 static CORE_ADDR
3140 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3141 {
3142 struct arm_prologue_cache *cache;
3143
3144 if (*this_cache == NULL)
3145 *this_cache = arm_make_prologue_cache (this_frame);
3146 cache = (struct arm_prologue_cache *) *this_cache;
3147
3148 return cache->prev_sp - cache->framesize;
3149 }
3150
3151 struct frame_base arm_normal_base = {
3152 &arm_prologue_unwind,
3153 arm_normal_frame_base,
3154 arm_normal_frame_base,
3155 arm_normal_frame_base
3156 };
3157
3158 static struct value *
3159 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3160 int regnum)
3161 {
3162 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3163 CORE_ADDR lr, cpsr;
3164 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3165
3166 switch (regnum)
3167 {
3168 case ARM_PC_REGNUM:
3169 /* The PC is normally copied from the return column, which
3170 describes saves of LR. However, that version may have an
3171 extra bit set to indicate Thumb state. The bit is not
3172 part of the PC. */
3173 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3174 return frame_unwind_got_constant (this_frame, regnum,
3175 arm_addr_bits_remove (gdbarch, lr));
3176
3177 case ARM_PS_REGNUM:
3178 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3179 cpsr = get_frame_register_unsigned (this_frame, regnum);
3180 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3181 if (IS_THUMB_ADDR (lr))
3182 cpsr |= t_bit;
3183 else
3184 cpsr &= ~t_bit;
3185 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3186
3187 default:
3188 internal_error (__FILE__, __LINE__,
3189 _("Unexpected register %d"), regnum);
3190 }
3191 }
3192
3193 static void
3194 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3195 struct dwarf2_frame_state_reg *reg,
3196 struct frame_info *this_frame)
3197 {
3198 switch (regnum)
3199 {
3200 case ARM_PC_REGNUM:
3201 case ARM_PS_REGNUM:
3202 reg->how = DWARF2_FRAME_REG_FN;
3203 reg->loc.fn = arm_dwarf2_prev_register;
3204 break;
3205 case ARM_SP_REGNUM:
3206 reg->how = DWARF2_FRAME_REG_CFA;
3207 break;
3208 }
3209 }
3210
3211 /* Implement the stack_frame_destroyed_p gdbarch method. */
3212
3213 static int
3214 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3215 {
3216 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3217 unsigned int insn, insn2;
3218 int found_return = 0, found_stack_adjust = 0;
3219 CORE_ADDR func_start, func_end;
3220 CORE_ADDR scan_pc;
3221 gdb_byte buf[4];
3222
3223 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3224 return 0;
3225
3226 /* The epilogue is a sequence of instructions along the following lines:
3227
3228 - add stack frame size to SP or FP
3229 - [if frame pointer used] restore SP from FP
3230 - restore registers from SP [may include PC]
3231 - a return-type instruction [if PC wasn't already restored]
3232
3233 In a first pass, we scan forward from the current PC and verify the
3234 instructions we find as compatible with this sequence, ending in a
3235 return instruction.
3236
3237 However, this is not sufficient to distinguish indirect function calls
3238 within a function from indirect tail calls in the epilogue in some cases.
3239 Therefore, if we didn't already find any SP-changing instruction during
3240 forward scan, we add a backward scanning heuristic to ensure we actually
3241 are in the epilogue. */
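/* For instance (illustrative only), if PC points at the "pop" in the
Thumb epilogue "add sp, #16; pop {r4, r7, pc}", the forward scan
recognizes the pop as a return and the backward scan recognizes the
add as the stack adjustment. */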
3242
3243 scan_pc = pc;
3244 while (scan_pc < func_end && !found_return)
3245 {
3246 if (target_read_memory (scan_pc, buf, 2))
3247 break;
3248
3249 scan_pc += 2;
3250 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3251
3252 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3253 found_return = 1;
3254 else if (insn == 0x46f7) /* mov pc, lr */
3255 found_return = 1;
3256 else if (thumb_instruction_restores_sp (insn))
3257 {
3258 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3259 found_return = 1;
3260 }
3261 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3262 {
3263 if (target_read_memory (scan_pc, buf, 2))
3264 break;
3265
3266 scan_pc += 2;
3267 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3268
3269 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3270 {
3271 if (insn2 & 0x8000) /* <registers> include PC. */
3272 found_return = 1;
3273 }
3274 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3275 && (insn2 & 0x0fff) == 0x0b04)
3276 {
3277 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3278 found_return = 1;
3279 }
3280 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3281 && (insn2 & 0x0e00) == 0x0a00)
3282 ;
3283 else
3284 break;
3285 }
3286 else
3287 break;
3288 }
3289
3290 if (!found_return)
3291 return 0;
3292
3293 /* Since any instruction in the epilogue sequence, with the possible
3294 exception of return itself, updates the stack pointer, we need to
3295 scan backwards for at most one instruction. Try either a 16-bit or
3296 a 32-bit instruction. This is just a heuristic, so we do not worry
3297 too much about false positives. */
3298
3299 if (pc - 4 < func_start)
3300 return 0;
3301 if (target_read_memory (pc - 4, buf, 4))
3302 return 0;
3303
3304 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3305 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3306
3307 if (thumb_instruction_restores_sp (insn2))
3308 found_stack_adjust = 1;
3309 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3310 found_stack_adjust = 1;
3311 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3312 && (insn2 & 0x0fff) == 0x0b04)
3313 found_stack_adjust = 1;
3314 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3315 && (insn2 & 0x0e00) == 0x0a00)
3316 found_stack_adjust = 1;
3317
3318 return found_stack_adjust;
3319 }
3320
3321 static int
3322 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3323 {
3324 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3325 unsigned int insn;
3326 int found_return;
3327 CORE_ADDR func_start, func_end;
3328
3329 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3330 return 0;
3331
3332 /* We are in the epilogue if the previous instruction was a stack
3333 adjustment and the next instruction is a possible return (bx, mov
3334 pc, or pop). We could have to scan backwards to find the stack
3335 adjustment, or forwards to find the return, but this is a decent
3336 approximation. First scan forwards. */
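/* For instance (illustrative only), with PC at the final "bx lr" of
"ldmfd sp!, {r4, r11, lr}; bx lr", the forward check recognizes the BX
return and the backward check recognizes the LDMFD as restoring SP. */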
3337
3338 found_return = 0;
3339 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3340 if (bits (insn, 28, 31) != INST_NV)
3341 {
3342 if ((insn & 0x0ffffff0) == 0x012fff10)
3343 /* BX. */
3344 found_return = 1;
3345 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3346 /* MOV PC. */
3347 found_return = 1;
3348 else if ((insn & 0x0fff0000) == 0x08bd0000
3349 && (insn & 0x0000c000) != 0)
3350 /* POP (LDMIA), including PC or LR. */
3351 found_return = 1;
3352 }
3353
3354 if (!found_return)
3355 return 0;
3356
3357 /* Scan backwards. This is just a heuristic, so do not worry about
3358 false positives from mode changes. */
3359
3360 if (pc < func_start + 4)
3361 return 0;
3362
3363 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3364 if (arm_instruction_restores_sp (insn))
3365 return 1;
3366
3367 return 0;
3368 }
3369
3370 /* Implement the stack_frame_destroyed_p gdbarch method. */
3371
3372 static int
3373 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3374 {
3375 if (arm_pc_is_thumb (gdbarch, pc))
3376 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3377 else
3378 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3379 }
3380
3381 /* When arguments must be pushed onto the stack, they go on in reverse
3382 order. The code below implements a FILO (stack) to do this. */
3383
3384 struct stack_item
3385 {
3386 int len;
3387 struct stack_item *prev;
3388 gdb_byte *data;
3389 };
3390
3391 static struct stack_item *
3392 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3393 {
3394 struct stack_item *si;
3395 si = XNEW (struct stack_item);
3396 si->data = (gdb_byte *) xmalloc (len);
3397 si->len = len;
3398 si->prev = prev;
3399 memcpy (si->data, contents, len);
3400 return si;
3401 }
3402
3403 static struct stack_item *
3404 pop_stack_item (struct stack_item *si)
3405 {
3406 struct stack_item *dead = si;
3407 si = si->prev;
3408 xfree (dead->data);
3409 xfree (dead);
3410 return si;
3411 }
3412
3413 /* Implement the gdbarch type alignment method; it overrides the
3414 generic alignment algorithm for anything that is ARM specific. */
3415
3416 static ULONGEST
3417 arm_type_align (gdbarch *gdbarch, struct type *t)
3418 {
3419 t = check_typedef (t);
3420 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3421 {
3422 /* Use the natural alignment for vector types (the same as for
3423 scalar types), but the maximum alignment is 64 bits. */
3424 if (TYPE_LENGTH (t) > 8)
3425 return 8;
3426 else
3427 return TYPE_LENGTH (t);
3428 }
3429
3430 /* Allow the common code to calculate the alignment. */
3431 return 0;
3432 }
3433
3434 /* Possible base types for a candidate for passing and returning in
3435 VFP registers. */
3436
3437 enum arm_vfp_cprc_base_type
3438 {
3439 VFP_CPRC_UNKNOWN,
3440 VFP_CPRC_SINGLE,
3441 VFP_CPRC_DOUBLE,
3442 VFP_CPRC_VEC64,
3443 VFP_CPRC_VEC128
3444 };
3445
3446 /* The length of one element of base type B. */
3447
3448 static unsigned
3449 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3450 {
3451 switch (b)
3452 {
3453 case VFP_CPRC_SINGLE:
3454 return 4;
3455 case VFP_CPRC_DOUBLE:
3456 return 8;
3457 case VFP_CPRC_VEC64:
3458 return 8;
3459 case VFP_CPRC_VEC128:
3460 return 16;
3461 default:
3462 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3463 (int) b);
3464 }
3465 }
3466
3467 /* The character ('s', 'd' or 'q') for the type of VFP register used
3468 for passing base type B. */
3469
3470 static int
3471 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3472 {
3473 switch (b)
3474 {
3475 case VFP_CPRC_SINGLE:
3476 return 's';
3477 case VFP_CPRC_DOUBLE:
3478 return 'd';
3479 case VFP_CPRC_VEC64:
3480 return 'd';
3481 case VFP_CPRC_VEC128:
3482 return 'q';
3483 default:
3484 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3485 (int) b);
3486 }
3487 }
3488
3489 /* Determine whether T may be part of a candidate for passing and
3490 returning in VFP registers, ignoring the limit on the total number
3491 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3492 classification of the first valid component found; if it is not
3493 VFP_CPRC_UNKNOWN, all components must have the same classification
3494 as *BASE_TYPE. If it is found that T contains a type not permitted
3495 for passing and returning in VFP registers, a type differently
3496 classified from *BASE_TYPE, or two types differently classified
3497 from each other, return -1, otherwise return the total number of
3498 base-type elements found (possibly 0 in an empty structure or
3499 array). Vector types are not currently supported, matching the
3500 generic AAPCS support. */
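/* Illustrative examples (hypothetical types, not taken from this
file):
struct { float x, y; } -> base type single, count 2
struct { double re, im; } -> base type double, count 2
struct { double d; int i; } -> not a candidate (returns -1) */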
3501
3502 static int
3503 arm_vfp_cprc_sub_candidate (struct type *t,
3504 enum arm_vfp_cprc_base_type *base_type)
3505 {
3506 t = check_typedef (t);
3507 switch (t->code ())
3508 {
3509 case TYPE_CODE_FLT:
3510 switch (TYPE_LENGTH (t))
3511 {
3512 case 4:
3513 if (*base_type == VFP_CPRC_UNKNOWN)
3514 *base_type = VFP_CPRC_SINGLE;
3515 else if (*base_type != VFP_CPRC_SINGLE)
3516 return -1;
3517 return 1;
3518
3519 case 8:
3520 if (*base_type == VFP_CPRC_UNKNOWN)
3521 *base_type = VFP_CPRC_DOUBLE;
3522 else if (*base_type != VFP_CPRC_DOUBLE)
3523 return -1;
3524 return 1;
3525
3526 default:
3527 return -1;
3528 }
3529 break;
3530
3531 case TYPE_CODE_COMPLEX:
3532 /* Arguments of complex T where T is one of the types float or
3533 double get treated as if they are implemented as:
3534
3535 struct complexT
3536 {
3537 T real;
3538 T imag;
3539 };
3540
3541 */
3542 switch (TYPE_LENGTH (t))
3543 {
3544 case 8:
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_SINGLE;
3547 else if (*base_type != VFP_CPRC_SINGLE)
3548 return -1;
3549 return 2;
3550
3551 case 16:
3552 if (*base_type == VFP_CPRC_UNKNOWN)
3553 *base_type = VFP_CPRC_DOUBLE;
3554 else if (*base_type != VFP_CPRC_DOUBLE)
3555 return -1;
3556 return 2;
3557
3558 default:
3559 return -1;
3560 }
3561 break;
3562
3563 case TYPE_CODE_ARRAY:
3564 {
3565 if (t->is_vector ())
3566 {
3567 /* 64-bit and 128-bit containerized vector types are VFP
3568 CPRCs. */
3569 switch (TYPE_LENGTH (t))
3570 {
3571 case 8:
3572 if (*base_type == VFP_CPRC_UNKNOWN)
3573 *base_type = VFP_CPRC_VEC64;
3574 return 1;
3575 case 16:
3576 if (*base_type == VFP_CPRC_UNKNOWN)
3577 *base_type = VFP_CPRC_VEC128;
3578 return 1;
3579 default:
3580 return -1;
3581 }
3582 }
3583 else
3584 {
3585 int count;
3586 unsigned unitlen;
3587
3588 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3589 base_type);
3590 if (count == -1)
3591 return -1;
3592 if (TYPE_LENGTH (t) == 0)
3593 {
3594 gdb_assert (count == 0);
3595 return 0;
3596 }
3597 else if (count == 0)
3598 return -1;
3599 unitlen = arm_vfp_cprc_unit_length (*base_type);
3600 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3601 return TYPE_LENGTH (t) / unitlen;
3602 }
3603 }
3604 break;
3605
3606 case TYPE_CODE_STRUCT:
3607 {
3608 int count = 0;
3609 unsigned unitlen;
3610 int i;
3611 for (i = 0; i < t->num_fields (); i++)
3612 {
3613 int sub_count = 0;
3614
3615 if (!field_is_static (&t->field (i)))
3616 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3617 base_type);
3618 if (sub_count == -1)
3619 return -1;
3620 count += sub_count;
3621 }
3622 if (TYPE_LENGTH (t) == 0)
3623 {
3624 gdb_assert (count == 0);
3625 return 0;
3626 }
3627 else if (count == 0)
3628 return -1;
3629 unitlen = arm_vfp_cprc_unit_length (*base_type);
3630 if (TYPE_LENGTH (t) != unitlen * count)
3631 return -1;
3632 return count;
3633 }
3634
3635 case TYPE_CODE_UNION:
3636 {
3637 int count = 0;
3638 unsigned unitlen;
3639 int i;
3640 for (i = 0; i < t->num_fields (); i++)
3641 {
3642 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3643 base_type);
3644 if (sub_count == -1)
3645 return -1;
3646 count = (count > sub_count ? count : sub_count);
3647 }
3648 if (TYPE_LENGTH (t) == 0)
3649 {
3650 gdb_assert (count == 0);
3651 return 0;
3652 }
3653 else if (count == 0)
3654 return -1;
3655 unitlen = arm_vfp_cprc_unit_length (*base_type);
3656 if (TYPE_LENGTH (t) != unitlen * count)
3657 return -1;
3658 return count;
3659 }
3660
3661 default:
3662 break;
3663 }
3664
3665 return -1;
3666 }
3667
3668 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3669 if passed to or returned from a non-variadic function with the VFP
3670 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3671 *BASE_TYPE to the base type for T and *COUNT to the number of
3672 elements of that base type before returning. */
3673
3674 static int
3675 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3676 int *count)
3677 {
3678 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3679 int c = arm_vfp_cprc_sub_candidate (t, &b);
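/* The VFP variant of the AAPCS only passes homogeneous aggregates of one
to four base-type elements in VFP registers, hence the 1..4 bound below. */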
3680 if (c <= 0 || c > 4)
3681 return 0;
3682 *base_type = b;
3683 *count = c;
3684 return 1;
3685 }
3686
3687 /* Return 1 if the VFP ABI should be used for passing arguments to and
3688 returning values from a function of type FUNC_TYPE, 0
3689 otherwise. */
3690
3691 static int
3692 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3693 {
3694 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3695 /* Variadic functions always use the base ABI. Assume that functions
3696 without debug info are not variadic. */
3697 if (func_type && check_typedef (func_type)->has_varargs ())
3698 return 0;
3699 /* The VFP ABI is only supported as a variant of AAPCS. */
3700 if (tdep->arm_abi != ARM_ABI_AAPCS)
3701 return 0;
3702 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3703 }
3704
3705 /* We currently only support passing parameters in integer registers, which
3706 conforms with GCC's default model, and VFP argument passing following
3707 the VFP variant of AAPCS. Several other variants exist and
3708 we should probably support some of them based on the selected ABI. */
3709
3710 static CORE_ADDR
3711 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3712 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3713 struct value **args, CORE_ADDR sp,
3714 function_call_return_method return_method,
3715 CORE_ADDR struct_addr)
3716 {
3717 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3718 int argnum;
3719 int argreg;
3720 int nstack;
3721 struct stack_item *si = NULL;
3722 int use_vfp_abi;
3723 struct type *ftype;
3724 unsigned vfp_regs_free = (1 << 16) - 1;
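/* Each bit of vfp_regs_free tracks one of the sixteen single-precision
units (s0-s15, equivalently d0-d7) available for VFP argument passing;
see the allocation loop in the VFP candidate handling below. */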
3725
3726 /* Determine the type of this function and whether the VFP ABI
3727 applies. */
3728 ftype = check_typedef (value_type (function));
3729 if (ftype->code () == TYPE_CODE_PTR)
3730 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3731 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3732
3733 /* Set the return address. For the ARM, the return breakpoint is
3734 always at BP_ADDR. */
3735 if (arm_pc_is_thumb (gdbarch, bp_addr))
3736 bp_addr |= 1;
3737 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3738
3739 /* Walk through the list of args and determine how large a temporary
3740 stack is required. Need to take care here as structs may be
3741 passed on the stack, and we have to push them. */
3742 nstack = 0;
3743
3744 argreg = ARM_A1_REGNUM;
3746
3747 /* The struct_return pointer occupies the first parameter
3748 passing register. */
3749 if (return_method == return_method_struct)
3750 {
3751 if (arm_debug)
3752 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3753 gdbarch_register_name (gdbarch, argreg),
3754 paddress (gdbarch, struct_addr));
3755 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3756 argreg++;
3757 }
3758
3759 for (argnum = 0; argnum < nargs; argnum++)
3760 {
3761 int len;
3762 struct type *arg_type;
3763 struct type *target_type;
3764 enum type_code typecode;
3765 const bfd_byte *val;
3766 int align;
3767 enum arm_vfp_cprc_base_type vfp_base_type;
3768 int vfp_base_count;
3769 int may_use_core_reg = 1;
3770
3771 arg_type = check_typedef (value_type (args[argnum]));
3772 len = TYPE_LENGTH (arg_type);
3773 target_type = TYPE_TARGET_TYPE (arg_type);
3774 typecode = arg_type->code ();
3775 val = value_contents (args[argnum]);
3776
3777 align = type_align (arg_type);
3778 /* Round alignment up to a whole number of words. */
3779 align = (align + ARM_INT_REGISTER_SIZE - 1)
3780 & ~(ARM_INT_REGISTER_SIZE - 1);
3781 /* Different ABIs have different maximum alignments. */
3782 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3783 {
3784 /* The APCS ABI only requires word alignment. */
3785 align = ARM_INT_REGISTER_SIZE;
3786 }
3787 else
3788 {
3789 /* The AAPCS requires at most doubleword alignment. */
3790 if (align > ARM_INT_REGISTER_SIZE * 2)
3791 align = ARM_INT_REGISTER_SIZE * 2;
3792 }
3793
3794 if (use_vfp_abi
3795 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3796 &vfp_base_count))
3797 {
3798 int regno;
3799 int unit_length;
3800 int shift;
3801 unsigned mask;
3802
3803 /* Because this is a CPRC it cannot go in a core register or
3804 cause a core register to be skipped for alignment.
3805 Either it goes in VFP registers and the rest of this loop
3806 iteration is skipped for this argument, or it goes on the
3807 stack (and the stack alignment code is correct for this
3808 case). */
3809 may_use_core_reg = 0;
3810
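/* Allocate the candidate to the lowest suitably aligned block of free
VFP registers, counting in single-precision units: SHIFT is 1 for
single, 2 for double and 4 for quad base types, so e.g. a candidate
of two doubles needs a free, even-numbered run of four s-registers. */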
3811 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3812 shift = unit_length / 4;
3813 mask = (1 << (shift * vfp_base_count)) - 1;
3814 for (regno = 0; regno < 16; regno += shift)
3815 if (((vfp_regs_free >> regno) & mask) == mask)
3816 break;
3817
3818 if (regno < 16)
3819 {
3820 int reg_char;
3821 int reg_scaled;
3822 int i;
3823
3824 vfp_regs_free &= ~(mask << regno);
3825 reg_scaled = regno / shift;
3826 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3827 for (i = 0; i < vfp_base_count; i++)
3828 {
3829 char name_buf[4];
3830 int regnum;
3831 if (reg_char == 'q')
3832 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3833 val + i * unit_length);
3834 else
3835 {
3836 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3837 reg_char, reg_scaled + i);
3838 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3839 strlen (name_buf));
3840 regcache->cooked_write (regnum, val + i * unit_length);
3841 }
3842 }
3843 continue;
3844 }
3845 else
3846 {
3847 /* This CPRC could not go in VFP registers, so all VFP
3848 registers are now marked as used. */
3849 vfp_regs_free = 0;
3850 }
3851 }
3852
3853 /* Push stack padding for doubleword alignment. */
3854 if (nstack & (align - 1))
3855 {
3856 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3857 nstack += ARM_INT_REGISTER_SIZE;
3858 }
3859
3860 /* Doubleword aligned quantities must go in even register pairs. */
3861 if (may_use_core_reg
3862 && argreg <= ARM_LAST_ARG_REGNUM
3863 && align > ARM_INT_REGISTER_SIZE
3864 && argreg & 1)
3865 argreg++;
3866
3867 /* If the argument is a pointer to a function, and it is a
3868 Thumb function, create a LOCAL copy of the value and set
3869 the THUMB bit in it. */
3870 if (TYPE_CODE_PTR == typecode
3871 && target_type != NULL
3872 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3873 {
3874 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3875 if (arm_pc_is_thumb (gdbarch, regval))
3876 {
3877 bfd_byte *copy = (bfd_byte *) alloca (len);
3878 store_unsigned_integer (copy, len, byte_order,
3879 MAKE_THUMB_ADDR (regval));
3880 val = copy;
3881 }
3882 }
3883
3884 /* Copy the argument to general registers or the stack in
3885 register-sized pieces. Large arguments are split between
3886 registers and stack. */
3887 while (len > 0)
3888 {
3889 int partial_len = len < ARM_INT_REGISTER_SIZE
3890 ? len : ARM_INT_REGISTER_SIZE;
3891 CORE_ADDR regval
3892 = extract_unsigned_integer (val, partial_len, byte_order);
3893
3894 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3895 {
3896 /* The argument is being passed in a general purpose
3897 register. */
3898 if (byte_order == BFD_ENDIAN_BIG)
3899 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3900 if (arm_debug)
3901 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3902 argnum,
3903 gdbarch_register_name
3904 (gdbarch, argreg),
3905 phex (regval, ARM_INT_REGISTER_SIZE));
3906 regcache_cooked_write_unsigned (regcache, argreg, regval);
3907 argreg++;
3908 }
3909 else
3910 {
3911 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3912
3913 memset (buf, 0, sizeof (buf));
3914 store_unsigned_integer (buf, partial_len, byte_order, regval);
3915
3916 /* Push the arguments onto the stack. */
3917 if (arm_debug)
3918 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3919 argnum, nstack);
3920 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3921 nstack += ARM_INT_REGISTER_SIZE;
3922 }
3923
3924 len -= partial_len;
3925 val += partial_len;
3926 }
3927 }
3928 /* If we have an odd number of words to push, then decrement the stack
3929 by one word now, so the first stack argument will be doubleword aligned. */
3930 if (nstack & 4)
3931 sp -= 4;
3932
3933 while (si)
3934 {
3935 sp -= si->len;
3936 write_memory (sp, si->data, si->len);
3937 si = pop_stack_item (si);
3938 }
3939
3940 /* Finally, update the SP register. */
3941 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3942
3943 return sp;
3944 }
3945
3946
3947 /* Always align the frame to an 8-byte boundary. This is required on
3948 some platforms and harmless on the rest. */
3949
3950 static CORE_ADDR
3951 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3952 {
3953 /* Align the stack to eight bytes. */
3954 return sp & ~ (CORE_ADDR) 7;
3955 }
3956
3957 static void
3958 print_fpu_flags (struct ui_file *file, int flags)
3959 {
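/* The bits decoded here are the usual IEEE exception flags: invalid
operation, divide by zero, overflow, underflow and inexact. */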
3960 if (flags & (1 << 0))
3961 fputs_filtered ("IVO ", file);
3962 if (flags & (1 << 1))
3963 fputs_filtered ("DVZ ", file);
3964 if (flags & (1 << 2))
3965 fputs_filtered ("OFL ", file);
3966 if (flags & (1 << 3))
3967 fputs_filtered ("UFL ", file);
3968 if (flags & (1 << 4))
3969 fputs_filtered ("INX ", file);
3970 fputc_filtered ('\n', file);
3971 }
3972
3973 /* Print interesting information about the floating point processor
3974 (if present) or emulator. */
3975 static void
3976 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3977 struct frame_info *frame, const char *args)
3978 {
3979 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3980 int type;
3981
3982 type = (status >> 24) & 127;
3983 if (status & (1 << 31))
3984 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3985 else
3986 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3987 /* i18n: [floating point unit] mask */
3988 fputs_filtered (_("mask: "), file);
3989 print_fpu_flags (file, status >> 16);
3990 /* i18n: [floating point unit] flags */
3991 fputs_filtered (_("flags: "), file);
3992 print_fpu_flags (file, status);
3993 }
3994
3995 /* Construct the ARM extended floating point type. */
3996 static struct type *
3997 arm_ext_type (struct gdbarch *gdbarch)
3998 {
3999 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4000
4001 if (!tdep->arm_ext_type)
4002 tdep->arm_ext_type
4003 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4004 floatformats_arm_ext);
4005
4006 return tdep->arm_ext_type;
4007 }
4008
4009 static struct type *
4010 arm_neon_double_type (struct gdbarch *gdbarch)
4011 {
4012 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4013
4014 if (tdep->neon_double_type == NULL)
4015 {
4016 struct type *t, *elem;
4017
4018 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4019 TYPE_CODE_UNION);
4020 elem = builtin_type (gdbarch)->builtin_uint8;
4021 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4022 elem = builtin_type (gdbarch)->builtin_uint16;
4023 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4024 elem = builtin_type (gdbarch)->builtin_uint32;
4025 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4026 elem = builtin_type (gdbarch)->builtin_uint64;
4027 append_composite_type_field (t, "u64", elem);
4028 elem = builtin_type (gdbarch)->builtin_float;
4029 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4030 elem = builtin_type (gdbarch)->builtin_double;
4031 append_composite_type_field (t, "f64", elem);
4032
4033 t->set_is_vector (true);
4034 t->set_name ("neon_d");
4035 tdep->neon_double_type = t;
4036 }
4037
4038 return tdep->neon_double_type;
4039 }
4040
4041 /* FIXME: The vector types are not correctly ordered on big-endian
4042 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4043 bits of d0 - regardless of what unit size is being held in d0. So
4044 the offset of the first uint8 in d0 is 7, but the offset of the
4045 first float is 4. This code works as-is for little-endian
4046 targets. */
4047
4048 static struct type *
4049 arm_neon_quad_type (struct gdbarch *gdbarch)
4050 {
4051 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4052
4053 if (tdep->neon_quad_type == NULL)
4054 {
4055 struct type *t, *elem;
4056
4057 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4058 TYPE_CODE_UNION);
4059 elem = builtin_type (gdbarch)->builtin_uint8;
4060 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4061 elem = builtin_type (gdbarch)->builtin_uint16;
4062 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4063 elem = builtin_type (gdbarch)->builtin_uint32;
4064 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4065 elem = builtin_type (gdbarch)->builtin_uint64;
4066 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4067 elem = builtin_type (gdbarch)->builtin_float;
4068 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4069 elem = builtin_type (gdbarch)->builtin_double;
4070 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4071
4072 t->set_is_vector (true);
4073 t->set_name ("neon_q");
4074 tdep->neon_quad_type = t;
4075 }
4076
4077 return tdep->neon_quad_type;
4078 }
4079
4080 /* Return the GDB type object for the "standard" data type of data in
4081 register N. */
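/* Note: the single-precision VFP pseudo registers (s0-s31) immediately
follow the raw registers, and the NEON quad pseudo registers (q0-q15)
follow those, which is what the range checks below rely on. */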
4082
4083 static struct type *
4084 arm_register_type (struct gdbarch *gdbarch, int regnum)
4085 {
4086 int num_regs = gdbarch_num_regs (gdbarch);
4087
4088 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4089 && regnum >= num_regs && regnum < num_regs + 32)
4090 return builtin_type (gdbarch)->builtin_float;
4091
4092 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4093 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4094 return arm_neon_quad_type (gdbarch);
4095
4096 /* If the target description has register information, we are only
4097 in this function so that we can override the types of
4098 double-precision registers for NEON. */
4099 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4100 {
4101 struct type *t = tdesc_register_type (gdbarch, regnum);
4102
4103 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4104 && t->code () == TYPE_CODE_FLT
4105 && gdbarch_tdep (gdbarch)->have_neon)
4106 return arm_neon_double_type (gdbarch);
4107 else
4108 return t;
4109 }
4110
4111 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4112 {
4113 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4114 return builtin_type (gdbarch)->builtin_void;
4115
4116 return arm_ext_type (gdbarch);
4117 }
4118 else if (regnum == ARM_SP_REGNUM)
4119 return builtin_type (gdbarch)->builtin_data_ptr;
4120 else if (regnum == ARM_PC_REGNUM)
4121 return builtin_type (gdbarch)->builtin_func_ptr;
4122 else if (regnum >= ARRAY_SIZE (arm_register_names))
4123 /* These registers are only supported on targets which supply
4124 an XML description. */
4125 return builtin_type (gdbarch)->builtin_int0;
4126 else
4127 return builtin_type (gdbarch)->builtin_uint32;
4128 }
4129
4130 /* Map a DWARF register REGNUM onto the appropriate GDB register
4131 number. */
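/* For reference, the ranges handled below follow the DWARF register
numbering for ARM: 0-15 core registers, 16-23 and 96-103 FPA registers,
64-95 VFP s0-s31, 104-111/112-127/192-199 iWMMXt registers, and
256-287 VFP/Neon d0-d31. Anything else maps to -1. */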
4132
4133 static int
4134 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4135 {
4136 /* Core integer regs. */
4137 if (reg >= 0 && reg <= 15)
4138 return reg;
4139
4140 /* Legacy FPA encoding. These were once used in a way which
4141 overlapped with VFP register numbering, so their use is
4142 discouraged, but GDB doesn't support the ARM toolchain
4143 which used them for VFP. */
4144 if (reg >= 16 && reg <= 23)
4145 return ARM_F0_REGNUM + reg - 16;
4146
4147 /* New assignments for the FPA registers. */
4148 if (reg >= 96 && reg <= 103)
4149 return ARM_F0_REGNUM + reg - 96;
4150
4151 /* WMMX register assignments. */
4152 if (reg >= 104 && reg <= 111)
4153 return ARM_WCGR0_REGNUM + reg - 104;
4154
4155 if (reg >= 112 && reg <= 127)
4156 return ARM_WR0_REGNUM + reg - 112;
4157
4158 if (reg >= 192 && reg <= 199)
4159 return ARM_WC0_REGNUM + reg - 192;
4160
4161 /* VFP v2 registers. A double precision value is actually
4162 in d1 rather than s2, but the ABI only defines numbering
4163 for the single precision registers. This will "just work"
4164 in GDB for little endian targets (we'll read eight bytes,
4165 starting in s0 and then progressing to s1), but will be
4166 reversed on big endian targets with VFP. This won't
4167 be a problem for the new Neon quad registers; you're supposed
4168 to use DW_OP_piece for those. */
4169 if (reg >= 64 && reg <= 95)
4170 {
4171 char name_buf[4];
4172
4173 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4174 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4175 strlen (name_buf));
4176 }
4177
4178 /* VFP v3 / Neon registers. This range is also used for VFP v2
4179 registers, except that it now describes d0 instead of s0. */
4180 if (reg >= 256 && reg <= 287)
4181 {
4182 char name_buf[4];
4183
4184 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4185 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4186 strlen (name_buf));
4187 }
4188
4189 return -1;
4190 }
4191
4192 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4193 static int
4194 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4195 {
4196 int reg = regnum;
4197 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4198
4199 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4200 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4201
4202 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4203 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4204
4205 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4206 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4207
4208 if (reg < NUM_GREGS)
4209 return SIM_ARM_R0_REGNUM + reg;
4210 reg -= NUM_GREGS;
4211
4212 if (reg < NUM_FREGS)
4213 return SIM_ARM_FP0_REGNUM + reg;
4214 reg -= NUM_FREGS;
4215
4216 if (reg < NUM_SREGS)
4217 return SIM_ARM_FPS_REGNUM + reg;
4218 reg -= NUM_SREGS;
4219
4220 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4221 }
4222
4223 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4224 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4225 NULL if an error occurs. BUF is freed. */
4226
4227 static gdb_byte *
4228 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4229 int old_len, int new_len)
4230 {
4231 gdb_byte *new_buf;
4232 int bytes_to_read = new_len - old_len;
4233
4234 new_buf = (gdb_byte *) xmalloc (new_len);
4235 memcpy (new_buf + bytes_to_read, buf, old_len);
4236 xfree (buf);
4237 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4238 {
4239 xfree (new_buf);
4240 return NULL;
4241 }
4242 return new_buf;
4243 }
4244
4245 /* An IT block is at most the 2-byte IT instruction followed by
4246 four 4-byte instructions. The furthest back we must search to
4247 find an IT block that affects the current instruction is thus
4248 2 + 3 * 4 == 14 bytes. */
4249 #define MAX_IT_BLOCK_PREFIX 14
4250
4251 /* Use a quick scan if there are more than this many bytes of
4252 code. */
4253 #define IT_SCAN_THRESHOLD 32
4254
4255 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4256 A breakpoint in an IT block may not be hit, depending on the
4257 condition flags. */
4258 static CORE_ADDR
4259 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4260 {
4261 gdb_byte *buf;
4262 char map_type;
4263 CORE_ADDR boundary, func_start;
4264 int buf_len;
4265 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4266 int i, any, last_it, last_it_count;
4267
4268 /* If we are using BKPT breakpoints, none of this is necessary. */
4269 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4270 return bpaddr;
4271
4272 /* ARM mode does not have this problem. */
4273 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4274 return bpaddr;
4275
4276 /* We are setting a breakpoint in Thumb code that could potentially
4277 contain an IT block. The first step is to find how much Thumb
4278 code there is; we do not need to read outside of known Thumb
4279 sequences. */
4280 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4281 if (map_type == 0)
4282 /* Thumb-2 code must have mapping symbols to have a chance. */
4283 return bpaddr;
4284
4285 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4286
4287 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4288 && func_start > boundary)
4289 boundary = func_start;
4290
4291 /* Search for a candidate IT instruction. We have to do some fancy
4292 footwork to distinguish a real IT instruction from the second
4293 half of a 32-bit instruction, but there is no need for that if
4294 there's no candidate. */
4295 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4296 if (buf_len == 0)
4297 /* No room for an IT instruction. */
4298 return bpaddr;
4299
4300 buf = (gdb_byte *) xmalloc (buf_len);
4301 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4302 return bpaddr;
4303 any = 0;
4304 for (i = 0; i < buf_len; i += 2)
4305 {
4306 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
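/* An IT instruction is encoded as 0xbf00 | (cond << 4) | mask with a
non-zero mask; a zero mask instead encodes a hint (NOP, YIELD, ...),
which is why the low four bits are checked here. */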
4307 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4308 {
4309 any = 1;
4310 break;
4311 }
4312 }
4313
4314 if (any == 0)
4315 {
4316 xfree (buf);
4317 return bpaddr;
4318 }
4319
4320 /* OK, the code bytes before this instruction contain at least one
4321 halfword which resembles an IT instruction. We know that it's
4322 Thumb code, but there are still two possibilities. Either the
4323 halfword really is an IT instruction, or it is the second half of
4324 a 32-bit Thumb instruction. The only way we can tell is to
4325 scan forwards from a known instruction boundary. */
4326 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4327 {
4328 int definite;
4329
4330 /* There's a lot of code before this instruction. Start with an
4331 optimistic search; it's easy to recognize halfwords that can
4332 not be the start of a 32-bit instruction, and use that to
4333 lock on to the instruction boundaries. */
4334 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4335 if (buf == NULL)
4336 return bpaddr;
4337 buf_len = IT_SCAN_THRESHOLD;
4338
4339 definite = 0;
4340 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4341 {
4342 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4343 if (thumb_insn_size (inst1) == 2)
4344 {
4345 definite = 1;
4346 break;
4347 }
4348 }
4349
4350 /* At this point, if DEFINITE, BUF[I] is the first place we
4351 are sure that we know the instruction boundaries, and it is far
4352 enough from BPADDR that we could not miss an IT instruction
4353 affecting BPADDR. If ! DEFINITE, give up - start from a
4354 known boundary. */
4355 if (! definite)
4356 {
4357 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4358 bpaddr - boundary);
4359 if (buf == NULL)
4360 return bpaddr;
4361 buf_len = bpaddr - boundary;
4362 i = 0;
4363 }
4364 }
4365 else
4366 {
4367 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4368 if (buf == NULL)
4369 return bpaddr;
4370 buf_len = bpaddr - boundary;
4371 i = 0;
4372 }
4373
4374 /* Scan forwards. Find the last IT instruction before BPADDR. */
4375 last_it = -1;
4376 last_it_count = 0;
4377 while (i < buf_len)
4378 {
4379 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4380 last_it_count--;
4381 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4382 {
4383 last_it = i;
4384 if (inst1 & 0x0001)
4385 last_it_count = 4;
4386 else if (inst1 & 0x0002)
4387 last_it_count = 3;
4388 else if (inst1 & 0x0004)
4389 last_it_count = 2;
4390 else
4391 last_it_count = 1;
4392 }
4393 i += thumb_insn_size (inst1);
4394 }
4395
4396 xfree (buf);
4397
4398 if (last_it == -1)
4399 /* There wasn't really an IT instruction after all. */
4400 return bpaddr;
4401
4402 if (last_it_count < 1)
4403 /* It was too far away. */
4404 return bpaddr;
4405
4406 /* This really is a trouble spot. Move the breakpoint to the IT
4407 instruction. */
4408 return bpaddr - buf_len + last_it;
4409 }
4410
4411 /* ARM displaced stepping support.
4412
4413 Generally ARM displaced stepping works as follows:
4414
4415 1. When an instruction is to be single-stepped, it is first decoded by
4416 arm_process_displaced_insn. Depending on the type of instruction, it is
4417 then copied to a scratch location, possibly in a modified form. The
4418 copy_* set of functions performs such modification, as necessary. A
4419 breakpoint is placed after the modified instruction in the scratch space
4420 to return control to GDB. Note in particular that instructions which
4421 modify the PC will no longer do so after modification.
4422
4423 2. The instruction is single-stepped, by setting the PC to the scratch
4424 location address, and resuming. Control returns to GDB when the
4425 breakpoint is hit.
4426
4427 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4428 function used for the current instruction. This function's job is to
4429 put the CPU/memory state back to what it would have been if the
4430 instruction had been executed unmodified in its original location. */
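/* As an illustrative example: a conditional branch is copied as a plain
NOP (see e.g. arm_copy_b_bl_blx below), and its cleanup routine
(cleanup_branch) evaluates the condition and writes the branch target
into the PC afterwards, so the branch behaves as if it had been
executed at its original address. */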
4431
4432 /* NOP instruction (mov r0, r0). */
4433 #define ARM_NOP 0xe1a00000
4434 #define THUMB_NOP 0x4600
4435
4436 /* Helper for register reads for displaced stepping. In particular, this
4437 returns the PC as it would be seen by the instruction at its original
4438 location. */
4439
4440 ULONGEST
4441 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4442 int regno)
4443 {
4444 ULONGEST ret;
4445 CORE_ADDR from = dsc->insn_addr;
4446
4447 if (regno == ARM_PC_REGNUM)
4448 {
4449 /* Compute pipeline offset:
4450 - When executing an ARM instruction, PC reads as the address of the
4451 current instruction plus 8.
4452 - When executing a Thumb instruction, PC reads as the address of the
4453 current instruction plus 4. */
4454
4455 if (!dsc->is_thumb)
4456 from += 8;
4457 else
4458 from += 4;
4459
4460 if (debug_displaced)
4461 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4462 (unsigned long) from);
4463 return (ULONGEST) from;
4464 }
4465 else
4466 {
4467 regcache_cooked_read_unsigned (regs, regno, &ret);
4468 if (debug_displaced)
4469 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4470 regno, (unsigned long) ret);
4471 return ret;
4472 }
4473 }
4474
4475 static int
4476 displaced_in_arm_mode (struct regcache *regs)
4477 {
4478 ULONGEST ps;
4479 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4480
4481 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4482
4483 return (ps & t_bit) == 0;
4484 }
4485
4486 /* Write to the PC as from a branch instruction. */
4487
4488 static void
4489 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4490 ULONGEST val)
4491 {
4492 if (!dsc->is_thumb)
4493 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4494 architecture versions < 6. */
4495 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4496 val & ~(ULONGEST) 0x3);
4497 else
4498 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4499 val & ~(ULONGEST) 0x1);
4500 }
4501
4502 /* Write to the PC as from a branch-exchange instruction. */
4503
4504 static void
4505 bx_write_pc (struct regcache *regs, ULONGEST val)
4506 {
4507 ULONGEST ps;
4508 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4509
4510 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4511
4512 if ((val & 1) == 1)
4513 {
4514 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4515 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4516 }
4517 else if ((val & 2) == 0)
4518 {
4519 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4520 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4521 }
4522 else
4523 {
4524 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4525 mode, align dest to 4 bytes). */
4526 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4527 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4528 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4529 }
4530 }
4531
4532 /* Write to the PC as if from a load instruction. */
4533
4534 static void
4535 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4536 ULONGEST val)
4537 {
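/* From ARMv5T onwards a load that writes the PC performs interworking,
i.e. it behaves like BX; on earlier architectures it behaves like a
plain branch. */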
4538 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4539 bx_write_pc (regs, val);
4540 else
4541 branch_write_pc (regs, dsc, val);
4542 }
4543
4544 /* Write to the PC as if from an ALU instruction. */
4545
4546 static void
4547 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4548 ULONGEST val)
4549 {
4550 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4551 bx_write_pc (regs, val);
4552 else
4553 branch_write_pc (regs, dsc, val);
4554 }
4555
4556 /* Helper for writing to registers for displaced stepping. Writing to the PC
4557 has varying effects depending on the instruction which does the write:
4558 this is controlled by the WRITE_PC argument. */
4559
4560 void
4561 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4562 int regno, ULONGEST val, enum pc_write_style write_pc)
4563 {
4564 if (regno == ARM_PC_REGNUM)
4565 {
4566 if (debug_displaced)
4567 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4568 (unsigned long) val);
4569 switch (write_pc)
4570 {
4571 case BRANCH_WRITE_PC:
4572 branch_write_pc (regs, dsc, val);
4573 break;
4574
4575 case BX_WRITE_PC:
4576 bx_write_pc (regs, val);
4577 break;
4578
4579 case LOAD_WRITE_PC:
4580 load_write_pc (regs, dsc, val);
4581 break;
4582
4583 case ALU_WRITE_PC:
4584 alu_write_pc (regs, dsc, val);
4585 break;
4586
4587 case CANNOT_WRITE_PC:
4588 warning (_("Instruction wrote to PC in an unexpected way when "
4589 "single-stepping"));
4590 break;
4591
4592 default:
4593 internal_error (__FILE__, __LINE__,
4594 _("Invalid argument to displaced_write_reg"));
4595 }
4596
4597 dsc->wrote_to_pc = 1;
4598 }
4599 else
4600 {
4601 if (debug_displaced)
4602 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4603 regno, (unsigned long) val);
4604 regcache_cooked_write_unsigned (regs, regno, val);
4605 }
4606 }
4607
4608 /* This function is used to concisely determine if an instruction INSN
4609 references PC. Register fields of interest in INSN should have the
4610 corresponding fields of BITMASK set to 0b1111. The function
4611 returns 1 if any of these fields in INSN reference the PC
4612 (also 0b1111, r15), else it returns 0. */
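/* For example, arm_copy_preload below passes a bitmask of 0x000f0000ul
so that only the Rn field (bits 16-19) of the instruction is checked
for a PC reference. */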
4613
4614 static int
4615 insn_references_pc (uint32_t insn, uint32_t bitmask)
4616 {
4617 uint32_t lowbit = 1;
4618
4619 while (bitmask != 0)
4620 {
4621 uint32_t mask;
4622
4623 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4624 ;
4625
4626 if (!lowbit)
4627 break;
4628
4629 mask = lowbit * 0xf;
4630
4631 if ((insn & mask) == mask)
4632 return 1;
4633
4634 bitmask &= ~mask;
4635 }
4636
4637 return 0;
4638 }
4639
4640 /* The simplest copy function. Many instructions have the same effect no
4641 matter what address they are executed at: in those cases, use this. */
4642
4643 static int
4644 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4645 const char *iname, arm_displaced_step_closure *dsc)
4646 {
4647 if (debug_displaced)
4648 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4649 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4650 iname);
4651
4652 dsc->modinsn[0] = insn;
4653
4654 return 0;
4655 }
4656
4657 static int
4658 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4659 uint16_t insn2, const char *iname,
4660 arm_displaced_step_closure *dsc)
4661 {
4662 if (debug_displaced)
4663 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4664 "opcode/class '%s' unmodified\n", insn1, insn2,
4665 iname);
4666
4667 dsc->modinsn[0] = insn1;
4668 dsc->modinsn[1] = insn2;
4669 dsc->numinsns = 2;
4670
4671 return 0;
4672 }
4673
4674 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4675 modification. */
4676 static int
4677 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4678 const char *iname,
4679 arm_displaced_step_closure *dsc)
4680 {
4681 if (debug_displaced)
4682 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4683 "opcode/class '%s' unmodified\n", insn,
4684 iname);
4685
4686 dsc->modinsn[0] = insn;
4687
4688 return 0;
4689 }
4690
4691 /* Preload instructions with immediate offset. */
4692
4693 static void
4694 cleanup_preload (struct gdbarch *gdbarch,
4695 struct regcache *regs, arm_displaced_step_closure *dsc)
4696 {
4697 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4698 if (!dsc->u.preload.immed)
4699 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4700 }
4701
4702 static void
4703 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4704 arm_displaced_step_closure *dsc, unsigned int rn)
4705 {
4706 ULONGEST rn_val;
4707 /* Preload instructions:
4708
4709 {pli/pld} [rn, #+/-imm]
4710 ->
4711 {pli/pld} [r0, #+/-imm]. */
4712
4713 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4714 rn_val = displaced_read_reg (regs, dsc, rn);
4715 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4716 dsc->u.preload.immed = 1;
4717
4718 dsc->cleanup = &cleanup_preload;
4719 }
4720
4721 static int
4722 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4723 arm_displaced_step_closure *dsc)
4724 {
4725 unsigned int rn = bits (insn, 16, 19);
4726
4727 if (!insn_references_pc (insn, 0x000f0000ul))
4728 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4729
4730 if (debug_displaced)
4731 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4732 (unsigned long) insn);
4733
4734 dsc->modinsn[0] = insn & 0xfff0ffff;
4735
4736 install_preload (gdbarch, regs, dsc, rn);
4737
4738 return 0;
4739 }
4740
4741 static int
4742 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4743 struct regcache *regs, arm_displaced_step_closure *dsc)
4744 {
4745 unsigned int rn = bits (insn1, 0, 3);
4746 unsigned int u_bit = bit (insn1, 7);
4747 int imm12 = bits (insn2, 0, 11);
4748 ULONGEST pc_val;
4749
4750 if (rn != ARM_PC_REGNUM)
4751 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4752
4753 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3
4754 and PLD (literal) Encoding T1. */
4755 if (debug_displaced)
4756 fprintf_unfiltered (gdb_stdlog,
4757 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4758 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4759 imm12);
4760
4761 if (!u_bit)
4762 imm12 = -1 * imm12;
4763
4764 /* Rewrite instruction {pli/pld} PC imm12 into:
4765 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4766
4767 {pli/pld} [r0, r1]
4768
4769 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4770
4771 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4772 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4773
4774 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4775
4776 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4777 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4778 dsc->u.preload.immed = 0;
4779
4780 /* {pli/pld} [r0, r1] */
4781 dsc->modinsn[0] = insn1 & 0xfff0;
4782 dsc->modinsn[1] = 0xf001;
4783 dsc->numinsns = 2;
4784
4785 dsc->cleanup = &cleanup_preload;
4786 return 0;
4787 }
4788
4789 /* Preload instructions with register offset. */
4790
4791 static void
4792 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4793 arm_displaced_step_closure *dsc, unsigned int rn,
4794 unsigned int rm)
4795 {
4796 ULONGEST rn_val, rm_val;
4797
4798 /* Preload register-offset instructions:
4799
4800 {pli/pld} [rn, rm {, shift}]
4801 ->
4802 {pli/pld} [r0, r1 {, shift}]. */
4803
4804 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4805 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4806 rn_val = displaced_read_reg (regs, dsc, rn);
4807 rm_val = displaced_read_reg (regs, dsc, rm);
4808 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4809 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4810 dsc->u.preload.immed = 0;
4811
4812 dsc->cleanup = &cleanup_preload;
4813 }
4814
4815 static int
4816 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4817 struct regcache *regs,
4818 arm_displaced_step_closure *dsc)
4819 {
4820 unsigned int rn = bits (insn, 16, 19);
4821 unsigned int rm = bits (insn, 0, 3);
4822
4823
4824 if (!insn_references_pc (insn, 0x000f000ful))
4825 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4826
4827 if (debug_displaced)
4828 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4829 (unsigned long) insn);
4830
4831 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4832
4833 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4834 return 0;
4835 }
4836
4837 /* Copy/cleanup coprocessor load and store instructions. */
4838
4839 static void
4840 cleanup_copro_load_store (struct gdbarch *gdbarch,
4841 struct regcache *regs,
4842 arm_displaced_step_closure *dsc)
4843 {
4844 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4845
4846 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4847
4848 if (dsc->u.ldst.writeback)
4849 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4850 }
4851
4852 static void
4853 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4854 arm_displaced_step_closure *dsc,
4855 int writeback, unsigned int rn)
4856 {
4857 ULONGEST rn_val;
4858
4859 /* Coprocessor load/store instructions:
4860
4861 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4862 ->
4863 {stc/stc2} [r0, #+/-imm].
4864
4865 ldc/ldc2 are handled identically. */
4866
4867 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4868 rn_val = displaced_read_reg (regs, dsc, rn);
4869 /* PC should be 4-byte aligned. */
4870 rn_val = rn_val & 0xfffffffc;
4871 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4872
4873 dsc->u.ldst.writeback = writeback;
4874 dsc->u.ldst.rn = rn;
4875
4876 dsc->cleanup = &cleanup_copro_load_store;
4877 }
4878
4879 static int
4880 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs,
4882 arm_displaced_step_closure *dsc)
4883 {
4884 unsigned int rn = bits (insn, 16, 19);
4885
4886 if (!insn_references_pc (insn, 0x000f0000ul))
4887 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4888
4889 if (debug_displaced)
4890 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4891 "load/store insn %.8lx\n", (unsigned long) insn);
4892
4893 dsc->modinsn[0] = insn & 0xfff0ffff;
4894
4895 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4896
4897 return 0;
4898 }
4899
4900 static int
4901 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4902 uint16_t insn2, struct regcache *regs,
4903 arm_displaced_step_closure *dsc)
4904 {
4905 unsigned int rn = bits (insn1, 0, 3);
4906
4907 if (rn != ARM_PC_REGNUM)
4908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4909 "copro load/store", dsc);
4910
4911 if (debug_displaced)
4912 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4913 "load/store insn %.4x%.4x\n", insn1, insn2);
4914
4915 dsc->modinsn[0] = insn1 & 0xfff0;
4916 dsc->modinsn[1] = insn2;
4917 dsc->numinsns = 2;
4918
4919 /* This function is called to copy the LDC/LDC2/VLDR instructions, whose
4920 PC-relative forms do not support writeback, so pass 0. */
4921 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4922
4923 return 0;
4924 }
4925
4926 /* Clean up branch instructions (actually perform the branch, by setting
4927 PC). */
4928
4929 static void
4930 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4931 arm_displaced_step_closure *dsc)
4932 {
4933 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4934 int branch_taken = condition_true (dsc->u.branch.cond, status);
4935 enum pc_write_style write_pc = dsc->u.branch.exchange
4936 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4937
4938 if (!branch_taken)
4939 return;
4940
4941 if (dsc->u.branch.link)
4942 {
4943 /* LR should hold the address of the instruction following the current
4944 one. So as not to confuse logic handling a later `bx lr', set bit 0
4945 of the LR value if the current instruction is Thumb. */
4946 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4947
4948 if (dsc->is_thumb)
4949 next_insn_addr |= 0x1;
4950
4951 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4952 CANNOT_WRITE_PC);
4953 }
4954
4955 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4956 }
4957
4958 /* Copy B/BL/BLX instructions with immediate destinations. */
4959
4960 static void
4961 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4962 arm_displaced_step_closure *dsc,
4963 unsigned int cond, int exchange, int link, long offset)
4964 {
4965 /* Implement "BL<cond> <label>" as:
4966
4967 Preparation: cond <- instruction condition
4968 Insn: mov r0, r0 (nop)
4969 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4970
4971 B<cond> similar, but don't set r14 in cleanup. */
4972
4973 dsc->u.branch.cond = cond;
4974 dsc->u.branch.link = link;
4975 dsc->u.branch.exchange = exchange;
4976
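/* The destination is formed from the original instruction address plus
the pipeline offset (8 for ARM, 4 for Thumb, matching displaced_read_reg)
plus the signed offset decoded by the caller. */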
4977 dsc->u.branch.dest = dsc->insn_addr;
4978 if (link && exchange)
4979 /* For BLX, the offset is computed from Align (PC, 4). */
4980 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4981
4982 if (dsc->is_thumb)
4983 dsc->u.branch.dest += 4 + offset;
4984 else
4985 dsc->u.branch.dest += 8 + offset;
4986
4987 dsc->cleanup = &cleanup_branch;
4988 }
4989 static int
4990 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4991 struct regcache *regs, arm_displaced_step_closure *dsc)
4992 {
4993 unsigned int cond = bits (insn, 28, 31);
4994 int exchange = (cond == 0xf);
4995 int link = exchange || bit (insn, 24);
4996 long offset;
4997
4998 if (debug_displaced)
4999 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5000 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5001 (unsigned long) insn);
5002 if (exchange)
5003 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5004 then arrange the switch into Thumb mode. */
5005 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5006 else
5007 offset = bits (insn, 0, 23) << 2;
5008
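/* Sign-extend the 26-bit branch offset. */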
5009 if (bit (offset, 25))
5010 offset = offset | ~0x3ffffff;
5011
5012 dsc->modinsn[0] = ARM_NOP;
5013
5014 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5015 return 0;
5016 }
5017
5018 static int
5019 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5020 uint16_t insn2, struct regcache *regs,
5021 arm_displaced_step_closure *dsc)
5022 {
5023 int link = bit (insn2, 14);
5024 int exchange = link && !bit (insn2, 12);
5025 int cond = INST_AL;
5026 long offset = 0;
5027 int j1 = bit (insn2, 13);
5028 int j2 = bit (insn2, 11);
5029 int s = sbits (insn1, 10, 10);
5030 int i1 = !(j1 ^ bit (insn1, 10));
5031 int i2 = !(j2 ^ bit (insn1, 10));
5032
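/* Reassemble the signed offset from the split immediate fields (S, J1,
J2 / I1, I2 and the two immediate chunks), as laid out by the Thumb-2
B (encodings T3/T4) and BL/BLX encodings. */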
5033 if (!link && !exchange) /* B */
5034 {
5035 offset = (bits (insn2, 0, 10) << 1);
5036 if (bit (insn2, 12)) /* Encoding T4 */
5037 {
5038 offset |= (bits (insn1, 0, 9) << 12)
5039 | (i2 << 22)
5040 | (i1 << 23)
5041 | (s << 24);
5042 cond = INST_AL;
5043 }
5044 else /* Encoding T3 */
5045 {
5046 offset |= (bits (insn1, 0, 5) << 12)
5047 | (j1 << 18)
5048 | (j2 << 19)
5049 | (s << 20);
5050 cond = bits (insn1, 6, 9);
5051 }
5052 }
5053 else
5054 {
5055 offset = (bits (insn1, 0, 9) << 12);
5056 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5057 offset |= exchange ?
5058 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5059 }
5060
5061 if (debug_displaced)
5062 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5063 "%.4x %.4x with offset %.8lx\n",
5064 link ? (exchange) ? "blx" : "bl" : "b",
5065 insn1, insn2, offset);
5066
5067 dsc->modinsn[0] = THUMB_NOP;
5068
5069 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5070 return 0;
5071 }
5072
5073 /* Copy B Thumb instructions. */
5074 static int
5075 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5076 arm_displaced_step_closure *dsc)
5077 {
5078 unsigned int cond = 0;
5079 int offset = 0;
5080 unsigned short bit_12_15 = bits (insn, 12, 15);
5081 CORE_ADDR from = dsc->insn_addr;
5082
5083 if (bit_12_15 == 0xd)
5084 {
5085 /* offset = SignExtend (imm8:0, 32) */
5086 offset = sbits ((insn << 1), 0, 8);
5087 cond = bits (insn, 8, 11);
5088 }
5089 else if (bit_12_15 == 0xe) /* Encoding T2 */
5090 {
5091 offset = sbits ((insn << 1), 0, 11);
5092 cond = INST_AL;
5093 }
5094
5095 if (debug_displaced)
5096 fprintf_unfiltered (gdb_stdlog,
5097 "displaced: copying b immediate insn %.4x "
5098 "with offset %d\n", insn, offset);
5099
5100 dsc->u.branch.cond = cond;
5101 dsc->u.branch.link = 0;
5102 dsc->u.branch.exchange = 0;
5103 dsc->u.branch.dest = from + 4 + offset;
5104
5105 dsc->modinsn[0] = THUMB_NOP;
5106
5107 dsc->cleanup = &cleanup_branch;
5108
5109 return 0;
5110 }
5111
5112 /* Copy BX/BLX with register-specified destinations. */
5113
5114 static void
5115 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5116 arm_displaced_step_closure *dsc, int link,
5117 unsigned int cond, unsigned int rm)
5118 {
5119 /* Implement "{BX,BLX}<cond> <reg>" as:
5120
5121 Preparation: cond <- instruction condition
5122 Insn: mov r0, r0 (nop)
5123 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5124
5125 Don't set r14 in cleanup for BX. */
5126
5127 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5128
5129 dsc->u.branch.cond = cond;
5130 dsc->u.branch.link = link;
5131
5132 dsc->u.branch.exchange = 1;
5133
5134 dsc->cleanup = &cleanup_branch;
5135 }
5136
5137 static int
5138 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5139 struct regcache *regs, arm_displaced_step_closure *dsc)
5140 {
5141 unsigned int cond = bits (insn, 28, 31);
5142 /* BX:  x12xxx1x
5143 BLX: x12xxx3x (bit 5 distinguishes BLX from BX). */
5144 int link = bit (insn, 5);
5145 unsigned int rm = bits (insn, 0, 3);
5146
5147 if (debug_displaced)
5148 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5149 (unsigned long) insn);
5150
5151 dsc->modinsn[0] = ARM_NOP;
5152
5153 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5154 return 0;
5155 }
5156
5157 static int
5158 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5159 struct regcache *regs,
5160 arm_displaced_step_closure *dsc)
5161 {
5162 int link = bit (insn, 7);
5163 unsigned int rm = bits (insn, 3, 6);
5164
5165 if (debug_displaced)
5166 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5167 (unsigned short) insn);
5168
5169 dsc->modinsn[0] = THUMB_NOP;
5170
5171 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5172
5173 return 0;
5174 }
5175
5176
5177 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5178
5179 static void
5180 cleanup_alu_imm (struct gdbarch *gdbarch,
5181 struct regcache *regs, arm_displaced_step_closure *dsc)
5182 {
5183 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5184 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5185 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5186 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5187 }
5188
5189 static int
5190 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5191 arm_displaced_step_closure *dsc)
5192 {
5193 unsigned int rn = bits (insn, 16, 19);
5194 unsigned int rd = bits (insn, 12, 15);
5195 unsigned int op = bits (insn, 21, 24);
5196 int is_mov = (op == 0xd);
5197 ULONGEST rd_val, rn_val;
5198
5199 if (!insn_references_pc (insn, 0x000ff000ul))
5200 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5201
5202 if (debug_displaced)
5203 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5204 "%.8lx\n", is_mov ? "move" : "ALU",
5205 (unsigned long) insn);
5206
5207 /* Instruction is of form:
5208
5209 <op><cond> rd, [rn,] #imm
5210
5211 Rewrite as:
5212
5213 Preparation: tmp1, tmp2 <- r0, r1;
5214 r0, r1 <- rd, rn
5215 Insn: <op><cond> r0, r1, #imm
5216 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5217 */
5218
5219 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5220 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5221 rn_val = displaced_read_reg (regs, dsc, rn);
5222 rd_val = displaced_read_reg (regs, dsc, rd);
5223 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5224 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5225 dsc->rd = rd;
5226
5227 if (is_mov)
5228 dsc->modinsn[0] = insn & 0xfff00fff;
5229 else
5230 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5231
5232 dsc->cleanup = &cleanup_alu_imm;
5233
5234 return 0;
5235 }
5236
5237 static int
5238 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5239 uint16_t insn2, struct regcache *regs,
5240 arm_displaced_step_closure *dsc)
5241 {
5242 unsigned int op = bits (insn1, 5, 8);
5243 unsigned int rn, rm, rd;
5244 ULONGEST rd_val, rn_val;
5245
5246 rn = bits (insn1, 0, 3); /* Rn */
5247 rm = bits (insn2, 0, 3); /* Rm */
5248 rd = bits (insn2, 8, 11); /* Rd */
5249
5250 /* This routine is only called for instruction MOV. */
5251 gdb_assert (op == 0x2 && rn == 0xf);
5252
5253 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5254 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5255
5256 if (debug_displaced)
5257 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5258 "ALU", insn1, insn2);
5259
5260 /* Instruction is of form:
5261
5262 <op><cond> rd, [rn,] #imm
5263
5264 Rewrite as:
5265
5266 Preparation: tmp1, tmp2 <- r0, r1;
5267 r0, r1 <- rd, rn
5268 Insn: <op><cond> r0, r1, #imm
5269 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5270 */
5271
5272 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5273 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5274 rn_val = displaced_read_reg (regs, dsc, rn);
5275 rd_val = displaced_read_reg (regs, dsc, rd);
5276 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5277 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5278 dsc->rd = rd;
5279
5280 dsc->modinsn[0] = insn1;
5281 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5282 dsc->numinsns = 2;
5283
5284 dsc->cleanup = &cleanup_alu_imm;
5285
5286 return 0;
5287 }
5288
5289 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5290
5291 static void
5292 cleanup_alu_reg (struct gdbarch *gdbarch,
5293 struct regcache *regs, arm_displaced_step_closure *dsc)
5294 {
5295 ULONGEST rd_val;
5296 int i;
5297
5298 rd_val = displaced_read_reg (regs, dsc, 0);
5299
5300 for (i = 0; i < 3; i++)
5301 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5302
5303 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5304 }
5305
5306 static void
5307 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5308 arm_displaced_step_closure *dsc,
5309 unsigned int rd, unsigned int rn, unsigned int rm)
5310 {
5311 ULONGEST rd_val, rn_val, rm_val;
5312
5313 /* Instruction is of form:
5314
5315 <op><cond> rd, [rn,] rm [, <shift>]
5316
5317 Rewrite as:
5318
5319 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5320 r0, r1, r2 <- rd, rn, rm
5321 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5322 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5323 */
5324
5325 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5326 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5327 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5328 rd_val = displaced_read_reg (regs, dsc, rd);
5329 rn_val = displaced_read_reg (regs, dsc, rn);
5330 rm_val = displaced_read_reg (regs, dsc, rm);
5331 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5332 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5333 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5334 dsc->rd = rd;
5335
5336 dsc->cleanup = &cleanup_alu_reg;
5337 }
5338
5339 static int
5340 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5341 arm_displaced_step_closure *dsc)
5342 {
5343 unsigned int op = bits (insn, 21, 24);
5344 int is_mov = (op == 0xd);
5345
5346 if (!insn_references_pc (insn, 0x000ff00ful))
5347 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5348
5349 if (debug_displaced)
5350 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5351 is_mov ? "move" : "ALU", (unsigned long) insn);
5352
5353 if (is_mov)
5354 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5355 else
5356 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5357
5358 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5359 bits (insn, 0, 3));
5360 return 0;
5361 }
5362
5363 static int
5364 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5365 struct regcache *regs,
5366 arm_displaced_step_closure *dsc)
5367 {
5368 unsigned rm, rd;
5369
5370 rm = bits (insn, 3, 6);
5371 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5372
5373 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5374 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5375
5376 if (debug_displaced)
5377 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5378 (unsigned short) insn);
5379
5380 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5381
5382 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5383
5384 return 0;
5385 }
5386
5387 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5388
5389 static void
5390 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5391 struct regcache *regs,
5392 arm_displaced_step_closure *dsc)
5393 {
5394 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5395 int i;
5396
5397 for (i = 0; i < 4; i++)
5398 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5399
5400 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5401 }
5402
5403 static void
5404 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5405 arm_displaced_step_closure *dsc,
5406 unsigned int rd, unsigned int rn, unsigned int rm,
5407 unsigned rs)
5408 {
5409 int i;
5410 ULONGEST rd_val, rn_val, rm_val, rs_val;
5411
5412 /* Instruction is of form:
5413
5414 <op><cond> rd, [rn,] rm, <shift> rs
5415
5416 Rewrite as:
5417
5418 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5419 r0, r1, r2, r3 <- rd, rn, rm, rs
5420 Insn: <op><cond> r0, r1, r2, <shift> r3
5421 Cleanup: tmp5 <- r0
5422 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5423 rd <- tmp5
5424 */
5425
5426 for (i = 0; i < 4; i++)
5427 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5428
5429 rd_val = displaced_read_reg (regs, dsc, rd);
5430 rn_val = displaced_read_reg (regs, dsc, rn);
5431 rm_val = displaced_read_reg (regs, dsc, rm);
5432 rs_val = displaced_read_reg (regs, dsc, rs);
5433 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5434 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5435 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5436 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5437 dsc->rd = rd;
5438 dsc->cleanup = &cleanup_alu_shifted_reg;
5439 }
5440
5441 static int
5442 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5443 struct regcache *regs,
5444 arm_displaced_step_closure *dsc)
5445 {
5446 unsigned int op = bits (insn, 21, 24);
5447 int is_mov = (op == 0xd);
5448 unsigned int rd, rn, rm, rs;
5449
5450 if (!insn_references_pc (insn, 0x000fff0ful))
5451 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5452
5453 if (debug_displaced)
5454 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5455 "%.8lx\n", is_mov ? "move" : "ALU",
5456 (unsigned long) insn);
5457
5458 rn = bits (insn, 16, 19);
5459 rm = bits (insn, 0, 3);
5460 rs = bits (insn, 8, 11);
5461 rd = bits (insn, 12, 15);
5462
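/* Rewrite the register operands so that the copied instruction operates on
   the scratch registers: "mov<cond> rd, rm, <shift> rs" becomes
   "mov<cond> r0, r2, <shift> r3", and "<op><cond> rd, rn, rm, <shift> rs"
   becomes "<op><cond> r0, r1, r2, <shift> r3", matching the scheme described
   in install_alu_shifted_reg above.  */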
5463 if (is_mov)
5464 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5465 else
5466 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5467
5468 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5469
5470 return 0;
5471 }
5472
5473 /* Clean up load instructions. */
5474
5475 static void
5476 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5477 arm_displaced_step_closure *dsc)
5478 {
5479 ULONGEST rt_val, rt_val2 = 0, rn_val;
5480
5481 rt_val = displaced_read_reg (regs, dsc, 0);
5482 if (dsc->u.ldst.xfersize == 8)
5483 rt_val2 = displaced_read_reg (regs, dsc, 1);
5484 rn_val = displaced_read_reg (regs, dsc, 2);
5485
5486 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5487 if (dsc->u.ldst.xfersize > 4)
5488 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5489 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5490 if (!dsc->u.ldst.immed)
5491 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5492
5493 /* Handle register writeback. */
5494 if (dsc->u.ldst.writeback)
5495 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5496 /* Put result in right place. */
5497 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5498 if (dsc->u.ldst.xfersize == 8)
5499 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5500 }
5501
5502 /* Clean up store instructions. */
5503
5504 static void
5505 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5506 arm_displaced_step_closure *dsc)
5507 {
5508 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5509
5510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5511 if (dsc->u.ldst.xfersize > 4)
5512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5514 if (!dsc->u.ldst.immed)
5515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5516 if (!dsc->u.ldst.restore_r4)
5517 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5518
5519 /* Writeback. */
5520 if (dsc->u.ldst.writeback)
5521 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5522 }
5523
5524 /* Copy "extra" load/store instructions. These are halfword/doubleword
5525 transfers, which have a different encoding to byte/word transfers. */
5526
5527 static int
5528 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5529 struct regcache *regs, arm_displaced_step_closure *dsc)
5530 {
5531 unsigned int op1 = bits (insn, 20, 24);
5532 unsigned int op2 = bits (insn, 5, 6);
5533 unsigned int rt = bits (insn, 12, 15);
5534 unsigned int rn = bits (insn, 16, 19);
5535 unsigned int rm = bits (insn, 0, 3);
5536 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5537 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
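/* LOAD and BYTESIZE are indexed by OPCODE, computed below from op2 (which
   selects the halfword, doubleword/signed-byte or doubleword/signed-halfword
   forms) combined with the load and immediate bits of op1.  For example,
   index 4 is ldrd (register), a load of 8 bytes, and index 9 is ldrsh
   (register), a load of 2 bytes.  */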
5538 int immed = (op1 & 0x4) != 0;
5539 int opcode;
5540 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5541
5542 if (!insn_references_pc (insn, 0x000ff00ful))
5543 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5544
5545 if (debug_displaced)
5546 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5547 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5548 (unsigned long) insn);
5549
5550 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5551
5552 if (opcode < 0)
5553 internal_error (__FILE__, __LINE__,
5554 _("copy_extra_ld_st: instruction decode error"));
5555
5556 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5557 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5558 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5559 if (!immed)
5560 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5561
5562 rt_val = displaced_read_reg (regs, dsc, rt);
5563 if (bytesize[opcode] == 8)
5564 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5565 rn_val = displaced_read_reg (regs, dsc, rn);
5566 if (!immed)
5567 rm_val = displaced_read_reg (regs, dsc, rm);
5568
5569 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5570 if (bytesize[opcode] == 8)
5571 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5572 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5573 if (!immed)
5574 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5575
5576 dsc->rd = rt;
5577 dsc->u.ldst.xfersize = bytesize[opcode];
5578 dsc->u.ldst.rn = rn;
5579 dsc->u.ldst.immed = immed;
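/* Writeback happens for post-indexed addressing (P bit, bit 24, clear) or
   when the W bit (bit 21) is set.  */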
5580 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5581 dsc->u.ldst.restore_r4 = 0;
5582
5583 if (immed)
5584 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5585 ->
5586 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5587 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5588 else
5589 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5590 ->
5591 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5592 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5593
5594 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5595
5596 return 0;
5597 }
5598
5599 /* Copy byte/half word/word loads and stores. */
5600
5601 static void
5602 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5603 arm_displaced_step_closure *dsc, int load,
5604 int immed, int writeback, int size, int usermode,
5605 int rt, int rm, int rn)
5606 {
5607 ULONGEST rt_val, rn_val, rm_val = 0;
5608
5609 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5610 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5611 if (!immed)
5612 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5613 if (!load)
5614 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5615
5616 rt_val = displaced_read_reg (regs, dsc, rt);
5617 rn_val = displaced_read_reg (regs, dsc, rn);
5618 if (!immed)
5619 rm_val = displaced_read_reg (regs, dsc, rm);
5620
5621 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5622 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5623 if (!immed)
5624 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5625 dsc->rd = rt;
5626 dsc->u.ldst.xfersize = size;
5627 dsc->u.ldst.rn = rn;
5628 dsc->u.ldst.immed = immed;
5629 dsc->u.ldst.writeback = writeback;
5630
5631 /* To write PC we can do:
5632
5633 Before this sequence of instructions:
5634 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5635 r2 is the Rn value got from displaced_read_reg.
5636
5637 Insn1: push {pc} Write address of STR instruction + offset on stack
5638 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5639 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5640 = addr(Insn1) + offset - addr(Insn3) - 8
5641 = offset - 16
5642 Insn4: add r4, r4, #8 r4 = offset - 8
5643 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5644 = from + offset
5645 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5646
5647 Otherwise we don't know what value to write for PC, since the offset is
5648 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5649 of this can be found in Section "Saving from r15" in
5650 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5651
5652 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5653 }
5654
5655
5656 static int
5657 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5658 uint16_t insn2, struct regcache *regs,
5659 arm_displaced_step_closure *dsc, int size)
5660 {
5661 unsigned int u_bit = bit (insn1, 7);
5662 unsigned int rt = bits (insn2, 12, 15);
5663 int imm12 = bits (insn2, 0, 11);
5664 ULONGEST pc_val;
5665
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5669 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5670 imm12);
5671
5672 if (!u_bit)
5673 imm12 = -1 * imm12;
5674
5675 /* Rewrite instruction LDR Rt imm12 into:
5676
5677 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5678
5679 LDR R0, [R2, R3];
5680
5681 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5682
5683
5684 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5685 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5686 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5687
5688 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5689
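/* The literal base address is Align(PC, 4): clear the low two bits of the
   PC value read above.  */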
5690 pc_val = pc_val & 0xfffffffc;
5691
5692 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5693 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5694
5695 dsc->rd = rt;
5696
5697 dsc->u.ldst.xfersize = size;
5698 dsc->u.ldst.immed = 0;
5699 dsc->u.ldst.writeback = 0;
5700 dsc->u.ldst.restore_r4 = 0;
5701
5702 /* LDR R0, [R2, R3] */
5703 dsc->modinsn[0] = 0xf852;
5704 dsc->modinsn[1] = 0x3;
5705 dsc->numinsns = 2;
5706
5707 dsc->cleanup = &cleanup_load;
5708
5709 return 0;
5710 }
5711
5712 static int
5713 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5714 uint16_t insn2, struct regcache *regs,
5715 arm_displaced_step_closure *dsc,
5716 int writeback, int immed)
5717 {
5718 unsigned int rt = bits (insn2, 12, 15);
5719 unsigned int rn = bits (insn1, 0, 3);
5720 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5721 /* In LDR (register), there is also a register Rm, which is not allowed to
5722 be PC, so we don't have to check it. */
5723
5724 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5726 dsc);
5727
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog,
5730 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5731 rt, rn, insn1, insn2);
5732
5733 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5734 0, rt, rm, rn);
5735
5736 dsc->u.ldst.restore_r4 = 0;
5737
5738 if (immed)
5739 /* ldr[b]<cond> rt, [rn, #imm], etc.
5740 ->
5741 ldr[b]<cond> r0, [r2, #imm]. */
5742 {
5743 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5744 dsc->modinsn[1] = insn2 & 0x0fff;
5745 }
5746 else
5747 /* ldr[b]<cond> rt, [rn, rm], etc.
5748 ->
5749 ldr[b]<cond> r0, [r2, r3]. */
5750 {
5751 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5752 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5753 }
5754
5755 dsc->numinsns = 2;
5756
5757 return 0;
5758 }
5759
5760
5761 static int
5762 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5763 struct regcache *regs,
5764 arm_displaced_step_closure *dsc,
5765 int load, int size, int usermode)
5766 {
5767 int immed = !bit (insn, 25);
5768 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5769 unsigned int rt = bits (insn, 12, 15);
5770 unsigned int rn = bits (insn, 16, 19);
5771 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5772
5773 if (!insn_references_pc (insn, 0x000ff00ful))
5774 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5775
5776 if (debug_displaced)
5777 fprintf_unfiltered (gdb_stdlog,
5778 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5779 load ? (size == 1 ? "ldrb" : "ldr")
5780 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5781 rt, rn,
5782 (unsigned long) insn);
5783
5784 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5785 usermode, rt, rm, rn);
5786
5787 if (load || rt != ARM_PC_REGNUM)
5788 {
5789 dsc->u.ldst.restore_r4 = 0;
5790
5791 if (immed)
5792 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5793 ->
5794 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5795 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5796 else
5797 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5798 ->
5799 {ldr,str}[b]<cond> r0, [r2, r3]. */
5800 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5801 }
5802 else
5803 {
5804 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5805 dsc->u.ldst.restore_r4 = 1;
5806 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5807 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5808 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5809 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5810 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5811
5812 /* As above. */
5813 if (immed)
5814 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5815 else
5816 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5817
5818 dsc->numinsns = 6;
5819 }
5820
5821 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5822
5823 return 0;
5824 }
5825
5826 /* Cleanup LDM instructions with fully-populated register list. This is an
5827 unfortunate corner case: it's impossible to implement correctly by modifying
5828 the instruction. The issue is as follows: we have an instruction,
5829
5830 ldm rN, {r0-r15}
5831
5832 which we must rewrite to avoid loading PC. A possible solution would be to
5833 do the load in two halves, something like (with suitable cleanup
5834 afterwards):
5835
5836 mov r8, rN
5837 ldm[id][ab] r8!, {r0-r7}
5838 str r7, <temp>
5839 ldm[id][ab] r8, {r7-r14}
5840 <bkpt>
5841
5842 but at present there's no suitable place for <temp>, since the scratch space
5843 is overwritten before the cleanup routine is called. For now, we simply
5844 emulate the instruction. */
5845
5846 static void
5847 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5848 arm_displaced_step_closure *dsc)
5849 {
5850 int inc = dsc->u.block.increment;
5851 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5852 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5853 uint32_t regmask = dsc->u.block.regmask;
5854 int regno = inc ? 0 : 15;
5855 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5856 int exception_return = dsc->u.block.load && dsc->u.block.user
5857 && (regmask & 0x8000) != 0;
5858 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5859 int do_transfer = condition_true (dsc->u.block.cond, status);
5860 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5861
5862 if (!do_transfer)
5863 return;
5864
5865 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5866 sensible we can do here. Complain loudly. */
5867 if (exception_return)
5868 error (_("Cannot single-step exception return"));
5869
5870 /* We don't handle any stores here for now. */
5871 gdb_assert (dsc->u.block.load != 0);
5872
5873 if (debug_displaced)
5874 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5875 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5876 dsc->u.block.increment ? "inc" : "dec",
5877 dsc->u.block.before ? "before" : "after");
5878
5879 while (regmask)
5880 {
5881 uint32_t memword;
5882
5883 if (inc)
5884 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5885 regno++;
5886 else
5887 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5888 regno--;
5889
5890 xfer_addr += bump_before;
5891
5892 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5893 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5894
5895 xfer_addr += bump_after;
5896
5897 regmask &= ~(1 << regno);
5898 }
5899
5900 if (dsc->u.block.writeback)
5901 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5902 CANNOT_WRITE_PC);
5903 }
5904
5905 /* Clean up an STM which included the PC in the register list. */
5906
5907 static void
5908 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5909 arm_displaced_step_closure *dsc)
5910 {
5911 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5912 int store_executed = condition_true (dsc->u.block.cond, status);
5913 CORE_ADDR pc_stored_at, transferred_regs
5914 = count_one_bits (dsc->u.block.regmask);
5915 CORE_ADDR stm_insn_addr;
5916 uint32_t pc_val;
5917 long offset;
5918 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5919
5920 /* If condition code fails, there's nothing else to do. */
5921 if (!store_executed)
5922 return;
5923
5924 if (dsc->u.block.increment)
5925 {
5926 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5927
5928 if (dsc->u.block.before)
5929 pc_stored_at += 4;
5930 }
5931 else
5932 {
5933 pc_stored_at = dsc->u.block.xfer_addr;
5934
5935 if (dsc->u.block.before)
5936 pc_stored_at -= 4;
5937 }
5938
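/* Read back the value the out-of-line STM stored for the PC.  Its distance
   from the scratch copy of the instruction gives the architecture-dependent
   PC store offset, which is then applied to the original instruction's
   address.  */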
5939 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5940 stm_insn_addr = dsc->scratch_base;
5941 offset = pc_val - stm_insn_addr;
5942
5943 if (debug_displaced)
5944 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5945 "STM instruction\n", offset);
5946
5947 /* Rewrite the stored PC to the proper value for the non-displaced original
5948 instruction. */
5949 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5950 dsc->insn_addr + offset);
5951 }
5952
5953 /* Clean up an LDM which includes the PC in the register list. We clumped all
5954 the registers in the transferred list into a contiguous range r0...rX (to
5955 avoid loading PC directly and losing control of the debugged program), so we
5956 must undo that here. */
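/* For example, "ldm r2, {r3, r6, pc}" will have been rewritten to load into
   {r0, r1, r2}; the code below moves r2 into the PC, r1 into r6 and r0 into
   r3, then restores any scratch registers that were clobbered in the
   process.  */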
5957
5958 static void
5959 cleanup_block_load_pc (struct gdbarch *gdbarch,
5960 struct regcache *regs,
5961 arm_displaced_step_closure *dsc)
5962 {
5963 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5964 int load_executed = condition_true (dsc->u.block.cond, status);
5965 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5966 unsigned int regs_loaded = count_one_bits (mask);
5967 unsigned int num_to_shuffle = regs_loaded, clobbered;
5968
5969 /* The method employed here will fail if the register list is fully populated
5970 (we need to avoid loading PC directly). */
5971 gdb_assert (num_to_shuffle < 16);
5972
5973 if (!load_executed)
5974 return;
5975
5976 clobbered = (1 << num_to_shuffle) - 1;
5977
5978 while (num_to_shuffle > 0)
5979 {
5980 if ((mask & (1 << write_reg)) != 0)
5981 {
5982 unsigned int read_reg = num_to_shuffle - 1;
5983
5984 if (read_reg != write_reg)
5985 {
5986 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5987 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5988 if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5990 "loaded register r%d to r%d\n"), read_reg,
5991 write_reg);
5992 }
5993 else if (debug_displaced)
5994 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5995 "r%d already in the right place\n"),
5996 write_reg);
5997
5998 clobbered &= ~(1 << write_reg);
5999
6000 num_to_shuffle--;
6001 }
6002
6003 write_reg--;
6004 }
6005
6006 /* Restore any registers we scribbled over. */
6007 for (write_reg = 0; clobbered != 0; write_reg++)
6008 {
6009 if ((clobbered & (1 << write_reg)) != 0)
6010 {
6011 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6012 CANNOT_WRITE_PC);
6013 if (debug_displaced)
6014 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6015 "clobbered register r%d\n"), write_reg);
6016 clobbered &= ~(1 << write_reg);
6017 }
6018 }
6019
6020 /* Perform register writeback manually. */
6021 if (dsc->u.block.writeback)
6022 {
6023 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6024
6025 if (dsc->u.block.increment)
6026 new_rn_val += regs_loaded * 4;
6027 else
6028 new_rn_val -= regs_loaded * 4;
6029
6030 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6031 CANNOT_WRITE_PC);
6032 }
6033 }
6034
6035 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6036 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6037
6038 static int
6039 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6040 struct regcache *regs,
6041 arm_displaced_step_closure *dsc)
6042 {
6043 int load = bit (insn, 20);
6044 int user = bit (insn, 22);
6045 int increment = bit (insn, 23);
6046 int before = bit (insn, 24);
6047 int writeback = bit (insn, 21);
6048 int rn = bits (insn, 16, 19);
6049
6050 /* Block transfers which don't mention PC can be run directly
6051 out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6053 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6054
6055 if (rn == ARM_PC_REGNUM)
6056 {
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6060 }
6061
6062 if (debug_displaced)
6063 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6064 "%.8lx\n", (unsigned long) insn);
6065
6066 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6067 dsc->u.block.rn = rn;
6068
6069 dsc->u.block.load = load;
6070 dsc->u.block.user = user;
6071 dsc->u.block.increment = increment;
6072 dsc->u.block.before = before;
6073 dsc->u.block.writeback = writeback;
6074 dsc->u.block.cond = bits (insn, 28, 31);
6075
6076 dsc->u.block.regmask = insn & 0xffff;
6077
6078 if (load)
6079 {
6080 if ((insn & 0xffff) == 0xffff)
6081 {
6082 /* LDM with a fully-populated register list. This case is
6083 particularly tricky. Implement for now by fully emulating the
6084 instruction (which might not behave perfectly in all cases, but
6085 these instructions should be rare enough for that not to matter
6086 too much). */
6087 dsc->modinsn[0] = ARM_NOP;
6088
6089 dsc->cleanup = &cleanup_block_load_all;
6090 }
6091 else
6092 {
6093 /* LDM of a list of registers which includes PC. Implement by
6094 rewriting the list of registers to be transferred into a
6095 contiguous chunk r0...rX before doing the transfer, then shuffling
6096 registers into the correct places in the cleanup routine. */
6097 unsigned int regmask = insn & 0xffff;
6098 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6099 unsigned int i;
6100
6101 for (i = 0; i < num_in_list; i++)
6102 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6103
6104 /* Writeback makes things complicated. We need to avoid clobbering
6105 the base register with one of the registers in our modified
6106 register list, but just using a different register can't work in
6107 all cases, e.g.:
6108
6109 ldm r14!, {r0-r13,pc}
6110
6111 which would need to be rewritten as:
6112
6113 ldm rN!, {r0-r14}
6114
6115 but that can't work, because there's no free register for N.
6116
6117 Solve this by turning off the writeback bit, and emulating
6118 writeback manually in the cleanup routine. */
6119
6120 if (writeback)
6121 insn &= ~(1 << 21);
6122
6123 new_regmask = (1 << num_in_list) - 1;
6124
6125 if (debug_displaced)
6126 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6127 "{..., pc}: original reg list %.4x, modified "
6128 "list %.4x\n"), rn, writeback ? "!" : "",
6129 (int) insn & 0xffff, new_regmask);
6130
6131 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6132
6133 dsc->cleanup = &cleanup_block_load_pc;
6134 }
6135 }
6136 else
6137 {
6138 /* STM of a list of registers which includes PC. Run the instruction
6139 as-is, but out of line: this will store the wrong value for the PC,
6140 so we must manually fix up the memory in the cleanup routine.
6141 Doing things this way has the advantage that we can auto-detect
6142 the offset of the PC write (which is architecture-dependent) in
6143 the cleanup routine. */
6144 dsc->modinsn[0] = insn;
6145
6146 dsc->cleanup = &cleanup_block_store_pc;
6147 }
6148
6149 return 0;
6150 }
6151
6152 static int
6153 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6154 struct regcache *regs,
6155 arm_displaced_step_closure *dsc)
6156 {
6157 int rn = bits (insn1, 0, 3);
6158 int load = bit (insn1, 4);
6159 int writeback = bit (insn1, 5);
6160
6161 /* Block transfers which don't mention PC can be run directly
6162 out-of-line. */
6163 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6164 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6165
6166 if (rn == ARM_PC_REGNUM)
6167 {
6168 warning (_("displaced: Unpredictable LDM or STM with "
6169 "base register r15"));
6170 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6171 "unpredictable ldm/stm", dsc);
6172 }
6173
6174 if (debug_displaced)
6175 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6176 "%.4x%.4x\n", insn1, insn2);
6177
6178 /* Clear bit 13, since it should always be zero. */
6179 dsc->u.block.regmask = (insn2 & 0xdfff);
6180 dsc->u.block.rn = rn;
6181
6182 dsc->u.block.load = load;
6183 dsc->u.block.user = 0;
6184 dsc->u.block.increment = bit (insn1, 7);
6185 dsc->u.block.before = bit (insn1, 8);
6186 dsc->u.block.writeback = writeback;
6187 dsc->u.block.cond = INST_AL;
6188 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6189
6190 if (load)
6191 {
6192 if (dsc->u.block.regmask == 0xffff)
6193 {
6194 /* This case cannot happen: bit 13 was cleared from the register mask above, so it can never equal 0xffff. */
6195 gdb_assert (0);
6196 }
6197 else
6198 {
6199 unsigned int regmask = dsc->u.block.regmask;
6200 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6201 unsigned int i;
6202
6203 for (i = 0; i < num_in_list; i++)
6204 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6205
6206 if (writeback)
6207 insn1 &= ~(1 << 5);
6208
6209 new_regmask = (1 << num_in_list) - 1;
6210
6211 if (debug_displaced)
6212 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6213 "{..., pc}: original reg list %.4x, modified "
6214 "list %.4x\n"), rn, writeback ? "!" : "",
6215 (int) dsc->u.block.regmask, new_regmask);
6216
6217 dsc->modinsn[0] = insn1;
6218 dsc->modinsn[1] = (new_regmask & 0xffff);
6219 dsc->numinsns = 2;
6220
6221 dsc->cleanup = &cleanup_block_load_pc;
6222 }
6223 }
6224 else
6225 {
6226 dsc->modinsn[0] = insn1;
6227 dsc->modinsn[1] = insn2;
6228 dsc->numinsns = 2;
6229 dsc->cleanup = &cleanup_block_store_pc;
6230 }
6231 return 0;
6232 }
6233
6234 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6235 This is used to avoid a dependency on BFD's bfd_endian enum. */
6236
6237 ULONGEST
6238 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6239 int byte_order)
6240 {
6241 return read_memory_unsigned_integer (memaddr, len,
6242 (enum bfd_endian) byte_order);
6243 }
6244
6245 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6246
6247 CORE_ADDR
6248 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6249 CORE_ADDR val)
6250 {
6251 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6252 }
6253
6254 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6255
6256 static CORE_ADDR
6257 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6258 {
6259 return 0;
6260 }
6261
6262 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6263
6264 int
6265 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6266 {
6267 return arm_is_thumb (self->regcache);
6268 }
6269
6270 /* single_step() is called just before we want to resume the inferior,
6271 if we want to single-step it but there is no hardware or kernel
6272 single-step support. We find the targets of the upcoming instructions
6273 and set breakpoints on them. */
6274
6275 std::vector<CORE_ADDR>
6276 arm_software_single_step (struct regcache *regcache)
6277 {
6278 struct gdbarch *gdbarch = regcache->arch ();
6279 struct arm_get_next_pcs next_pcs_ctx;
6280
6281 arm_get_next_pcs_ctor (&next_pcs_ctx,
6282 &arm_get_next_pcs_ops,
6283 gdbarch_byte_order (gdbarch),
6284 gdbarch_byte_order_for_code (gdbarch),
6285 0,
6286 regcache);
6287
6288 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6289
6290 for (CORE_ADDR &pc_ref : next_pcs)
6291 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6292
6293 return next_pcs;
6294 }
6295
6296 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6297 for Linux, where some SVC instructions must be treated specially. */
6298
6299 static void
6300 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6301 arm_displaced_step_closure *dsc)
6302 {
6303 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6304
6305 if (debug_displaced)
6306 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6307 "%.8lx\n", (unsigned long) resume_addr);
6308
6309 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6310 }
6311
6312
6313 /* Common copy routine for svc instruction. */
6314
6315 static int
6316 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6317 arm_displaced_step_closure *dsc)
6318 {
6319 /* Preparation: none.
6320 Insn: unmodified svc.
6321 Cleanup: pc <- insn_addr + insn_size. */
6322
6323 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6324 instruction. */
6325 dsc->wrote_to_pc = 1;
6326
6327 /* Allow OS-specific code to override SVC handling. */
6328 if (dsc->u.svc.copy_svc_os)
6329 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6330 else
6331 {
6332 dsc->cleanup = &cleanup_svc;
6333 return 0;
6334 }
6335 }
6336
6337 static int
6338 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6339 struct regcache *regs, arm_displaced_step_closure *dsc)
6340 {
6341
6342 if (debug_displaced)
6343 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6344 (unsigned long) insn);
6345
6346 dsc->modinsn[0] = insn;
6347
6348 return install_svc (gdbarch, regs, dsc);
6349 }
6350
6351 static int
6352 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6353 struct regcache *regs, arm_displaced_step_closure *dsc)
6354 {
6355
6356 if (debug_displaced)
6357 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6358 insn);
6359
6360 dsc->modinsn[0] = insn;
6361
6362 return install_svc (gdbarch, regs, dsc);
6363 }
6364
6365 /* Copy undefined instructions. */
6366
6367 static int
6368 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6369 arm_displaced_step_closure *dsc)
6370 {
6371 if (debug_displaced)
6372 fprintf_unfiltered (gdb_stdlog,
6373 "displaced: copying undefined insn %.8lx\n",
6374 (unsigned long) insn);
6375
6376 dsc->modinsn[0] = insn;
6377
6378 return 0;
6379 }
6380
6381 static int
6382 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6383 arm_displaced_step_closure *dsc)
6384 {
6385
6386 if (debug_displaced)
6387 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6388 "%.4x %.4x\n", (unsigned short) insn1,
6389 (unsigned short) insn2);
6390
6391 dsc->modinsn[0] = insn1;
6392 dsc->modinsn[1] = insn2;
6393 dsc->numinsns = 2;
6394
6395 return 0;
6396 }
6397
6398 /* Copy unpredictable instructions. */
6399
6400 static int
6401 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6402 arm_displaced_step_closure *dsc)
6403 {
6404 if (debug_displaced)
6405 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6406 "%.8lx\n", (unsigned long) insn);
6407
6408 dsc->modinsn[0] = insn;
6409
6410 return 0;
6411 }
6412
6413 /* The decode_* functions are instruction decoding helpers. They mostly follow
6414 the presentation in the ARM ARM. */
6415
6416 static int
6417 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6418 struct regcache *regs,
6419 arm_displaced_step_closure *dsc)
6420 {
6421 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6422 unsigned int rn = bits (insn, 16, 19);
6423
6424 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6425 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6426 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6427 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6428 else if ((op1 & 0x60) == 0x20)
6429 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6430 else if ((op1 & 0x71) == 0x40)
6431 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6432 dsc);
6433 else if ((op1 & 0x77) == 0x41)
6434 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6435 else if ((op1 & 0x77) == 0x45)
6436 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6437 else if ((op1 & 0x77) == 0x51)
6438 {
6439 if (rn != 0xf)
6440 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6441 else
6442 return arm_copy_unpred (gdbarch, insn, dsc);
6443 }
6444 else if ((op1 & 0x77) == 0x55)
6445 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6446 else if (op1 == 0x57)
6447 switch (op2)
6448 {
6449 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6450 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6451 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6452 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6453 default: return arm_copy_unpred (gdbarch, insn, dsc);
6454 }
6455 else if ((op1 & 0x63) == 0x43)
6456 return arm_copy_unpred (gdbarch, insn, dsc);
6457 else if ((op2 & 0x1) == 0x0)
6458 switch (op1 & ~0x80)
6459 {
6460 case 0x61:
6461 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6462 case 0x65:
6463 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6464 case 0x71: case 0x75:
6465 /* pld/pldw reg. */
6466 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6467 case 0x63: case 0x67: case 0x73: case 0x77:
6468 return arm_copy_unpred (gdbarch, insn, dsc);
6469 default:
6470 return arm_copy_undef (gdbarch, insn, dsc);
6471 }
6472 else
6473 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6474 }
6475
6476 static int
6477 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6478 struct regcache *regs,
6479 arm_displaced_step_closure *dsc)
6480 {
6481 if (bit (insn, 27) == 0)
6482 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6483 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6484 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6485 {
6486 case 0x0: case 0x2:
6487 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6488
6489 case 0x1: case 0x3:
6490 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6491
6492 case 0x4: case 0x5: case 0x6: case 0x7:
6493 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6494
6495 case 0x8:
6496 switch ((insn & 0xe00000) >> 21)
6497 {
6498 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6499 /* stc/stc2. */
6500 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6501
6502 case 0x2:
6503 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6504
6505 default:
6506 return arm_copy_undef (gdbarch, insn, dsc);
6507 }
6508
6509 case 0x9:
6510 {
6511 int rn_f = (bits (insn, 16, 19) == 0xf);
6512 switch ((insn & 0xe00000) >> 21)
6513 {
6514 case 0x1: case 0x3:
6515 /* ldc/ldc2 imm (undefined for rn == pc). */
6516 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6517 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6518
6519 case 0x2:
6520 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6521
6522 case 0x4: case 0x5: case 0x6: case 0x7:
6523 /* ldc/ldc2 lit (undefined for rn != pc). */
6524 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6525 : arm_copy_undef (gdbarch, insn, dsc);
6526
6527 default:
6528 return arm_copy_undef (gdbarch, insn, dsc);
6529 }
6530 }
6531
6532 case 0xa:
6533 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6534
6535 case 0xb:
6536 if (bits (insn, 16, 19) == 0xf)
6537 /* ldc/ldc2 lit. */
6538 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6539 else
6540 return arm_copy_undef (gdbarch, insn, dsc);
6541
6542 case 0xc:
6543 if (bit (insn, 4))
6544 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6545 else
6546 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6547
6548 case 0xd:
6549 if (bit (insn, 4))
6550 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6551 else
6552 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6553
6554 default:
6555 return arm_copy_undef (gdbarch, insn, dsc);
6556 }
6557 }
6558
6559 /* Decode miscellaneous instructions in dp/misc encoding space. */
6560
6561 static int
6562 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6563 struct regcache *regs,
6564 arm_displaced_step_closure *dsc)
6565 {
6566 unsigned int op2 = bits (insn, 4, 6);
6567 unsigned int op = bits (insn, 21, 22);
6568
6569 switch (op2)
6570 {
6571 case 0x0:
6572 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6573
6574 case 0x1:
6575 if (op == 0x1) /* bx. */
6576 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6577 else if (op == 0x3)
6578 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6579 else
6580 return arm_copy_undef (gdbarch, insn, dsc);
6581
6582 case 0x2:
6583 if (op == 0x1)
6584 /* Not really supported. */
6585 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6586 else
6587 return arm_copy_undef (gdbarch, insn, dsc);
6588
6589 case 0x3:
6590 if (op == 0x1)
6591 return arm_copy_bx_blx_reg (gdbarch, insn,
6592 regs, dsc); /* blx register. */
6593 else
6594 return arm_copy_undef (gdbarch, insn, dsc);
6595
6596 case 0x5:
6597 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6598
6599 case 0x7:
6600 if (op == 0x1)
6601 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6602 else if (op == 0x3)
6603 /* Not really supported. */
6604 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6605 /* Fall through. */
6606
6607 default:
6608 return arm_copy_undef (gdbarch, insn, dsc);
6609 }
6610 }
6611
6612 static int
6613 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6614 struct regcache *regs,
6615 arm_displaced_step_closure *dsc)
6616 {
6617 if (bit (insn, 25))
6618 switch (bits (insn, 20, 24))
6619 {
6620 case 0x10:
6621 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6622
6623 case 0x14:
6624 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6625
6626 case 0x12: case 0x16:
6627 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6628
6629 default:
6630 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6631 }
6632 else
6633 {
6634 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6635
6636 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6637 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6638 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6639 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6640 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6641 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6642 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6643 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6644 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6645 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6646 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6647 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6648 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6649 /* 2nd arg means "unprivileged". */
6650 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6651 dsc);
6652 }
6653
6654 /* Should be unreachable. */
6655 return 1;
6656 }
6657
6658 static int
6659 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6660 struct regcache *regs,
6661 arm_displaced_step_closure *dsc)
6662 {
6663 int a = bit (insn, 25), b = bit (insn, 4);
6664 uint32_t op1 = bits (insn, 20, 24);
6665
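/* In the encodings handled here, A (bit 25) selects the register-offset
   form, and op1 holds the P/U/B/W/L bits (bit 0 = L, bit 1 = W, bit 2 = B,
   bit 4 = P); the (op1 & 0x17) tests single out the unprivileged T variants
   (strt/ldrt/strbt/ldrbt, i.e. P == 0 && W == 1).  The last three arguments
   in each call below are LOAD, SIZE and USERMODE.  */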
6666 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6667 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6668 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6669 else if ((!a && (op1 & 0x17) == 0x02)
6670 || (a && (op1 & 0x17) == 0x02 && !b))
6671 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6672 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6673 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6674 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6675 else if ((!a && (op1 & 0x17) == 0x03)
6676 || (a && (op1 & 0x17) == 0x03 && !b))
6677 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6678 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6679 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6680 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6681 else if ((!a && (op1 & 0x17) == 0x06)
6682 || (a && (op1 & 0x17) == 0x06 && !b))
6683 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6684 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6685 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6686 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6687 else if ((!a && (op1 & 0x17) == 0x07)
6688 || (a && (op1 & 0x17) == 0x07 && !b))
6689 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6690
6691 /* Should be unreachable. */
6692 return 1;
6693 }
6694
6695 static int
6696 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6697 arm_displaced_step_closure *dsc)
6698 {
6699 switch (bits (insn, 20, 24))
6700 {
6701 case 0x00: case 0x01: case 0x02: case 0x03:
6702 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6703
6704 case 0x04: case 0x05: case 0x06: case 0x07:
6705 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6706
6707 case 0x08: case 0x09: case 0x0a: case 0x0b:
6708 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6709 return arm_copy_unmodified (gdbarch, insn,
6710 "decode/pack/unpack/saturate/reverse", dsc);
6711
6712 case 0x18:
6713 if (bits (insn, 5, 7) == 0) /* op2. */
6714 {
6715 if (bits (insn, 12, 15) == 0xf)
6716 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6717 else
6718 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6719 }
6720 else
6721 return arm_copy_undef (gdbarch, insn, dsc);
6722
6723 case 0x1a: case 0x1b:
6724 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6725 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6726 else
6727 return arm_copy_undef (gdbarch, insn, dsc);
6728
6729 case 0x1c: case 0x1d:
6730 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6731 {
6732 if (bits (insn, 0, 3) == 0xf)
6733 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6734 else
6735 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6736 }
6737 else
6738 return arm_copy_undef (gdbarch, insn, dsc);
6739
6740 case 0x1e: case 0x1f:
6741 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6742 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6743 else
6744 return arm_copy_undef (gdbarch, insn, dsc);
6745 }
6746
6747 /* Should be unreachable. */
6748 return 1;
6749 }
6750
6751 static int
6752 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6753 struct regcache *regs,
6754 arm_displaced_step_closure *dsc)
6755 {
6756 if (bit (insn, 25))
6757 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6758 else
6759 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6760 }
6761
6762 static int
6763 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6764 struct regcache *regs,
6765 arm_displaced_step_closure *dsc)
6766 {
6767 unsigned int opcode = bits (insn, 20, 24);
6768
6769 switch (opcode)
6770 {
6771 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6772 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6773
6774 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6775 case 0x12: case 0x16:
6776 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6777
6778 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6779 case 0x13: case 0x17:
6780 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6781
6782 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6783 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6784 /* Note: no writeback for these instructions. Bit 25 will always be
6785 zero though (via caller), so the following works OK. */
6786 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6787 }
6788
6789 /* Should be unreachable. */
6790 return 1;
6791 }
6792
6793 /* Decode shifted register instructions. */
6794
6795 static int
6796 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6797 uint16_t insn2, struct regcache *regs,
6798 arm_displaced_step_closure *dsc)
6799 {
6800 /* PC is only allowed to be used in the MOV instruction. */
6801
6802 unsigned int op = bits (insn1, 5, 8);
6803 unsigned int rn = bits (insn1, 0, 3);
6804
6805 if (op == 0x2 && rn == 0xf) /* MOV */
6806 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6807 else
6808 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6809 "dp (shift reg)", dsc);
6810 }
6811
6812
6813 /* Decode extension register load/store. Exactly the same as
6814 arm_decode_ext_reg_ld_st. */
6815
6816 static int
6817 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6818 uint16_t insn2, struct regcache *regs,
6819 arm_displaced_step_closure *dsc)
6820 {
6821 unsigned int opcode = bits (insn1, 4, 8);
6822
6823 switch (opcode)
6824 {
6825 case 0x04: case 0x05:
6826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6827 "vfp/neon vmov", dsc);
6828
6829 case 0x08: case 0x0c: /* 01x00 */
6830 case 0x0a: case 0x0e: /* 01x10 */
6831 case 0x12: case 0x16: /* 10x10 */
6832 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6833 "vfp/neon vstm/vpush", dsc);
6834
6835 case 0x09: case 0x0d: /* 01x01 */
6836 case 0x0b: case 0x0f: /* 01x11 */
6837 case 0x13: case 0x17: /* 10x11 */
6838 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6839 "vfp/neon vldm/vpop", dsc);
6840
6841 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6842 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6843 "vstr", dsc);
6844 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6845 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6846 }
6847
6848 /* Should be unreachable. */
6849 return 1;
6850 }
6851
6852 static int
6853 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6854 struct regcache *regs, arm_displaced_step_closure *dsc)
6855 {
6856 unsigned int op1 = bits (insn, 20, 25);
6857 int op = bit (insn, 4);
6858 unsigned int coproc = bits (insn, 8, 11);
6859
6860 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6861 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6862 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6863 && (coproc & 0xe) != 0xa)
6864 /* stc/stc2. */
6865 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6866 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6867 && (coproc & 0xe) != 0xa)
6868 /* ldc/ldc2 imm/lit. */
6869 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6870 else if ((op1 & 0x3e) == 0x00)
6871 return arm_copy_undef (gdbarch, insn, dsc);
6872 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6873 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6874 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6875 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6876 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6877 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6878 else if ((op1 & 0x30) == 0x20 && !op)
6879 {
6880 if ((coproc & 0xe) == 0xa)
6881 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6882 else
6883 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6884 }
6885 else if ((op1 & 0x30) == 0x20 && op)
6886 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6887 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6888 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6889 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6890 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6891 else if ((op1 & 0x30) == 0x30)
6892 return arm_copy_svc (gdbarch, insn, regs, dsc);
6893 else
6894 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6895 }
6896
6897 static int
6898 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6899 uint16_t insn2, struct regcache *regs,
6900 arm_displaced_step_closure *dsc)
6901 {
6902 unsigned int coproc = bits (insn2, 8, 11);
6903 unsigned int bit_5_8 = bits (insn1, 5, 8);
6904 unsigned int bit_9 = bit (insn1, 9);
6905 unsigned int bit_4 = bit (insn1, 4);
6906
6907 if (bit_9 == 0)
6908 {
6909 if (bit_5_8 == 2)
6910 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6911 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6912 dsc);
6913 else if (bit_5_8 == 0) /* UNDEFINED. */
6914 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6915 else
6916 {
6917 /* coproc is 101x: SIMD/VFP extension register load/store. */
6918 if ((coproc & 0xe) == 0xa)
6919 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6920 dsc);
6921 else /* coproc is not 101x. */
6922 {
6923 if (bit_4 == 0) /* STC/STC2. */
6924 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6925 "stc/stc2", dsc);
6926 else /* LDC/LDC2 {literal, immediate}. */
6927 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6928 regs, dsc);
6929 }
6930 }
6931 }
6932 else
6933 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6934
6935 return 0;
6936 }
6937
6938 static void
6939 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6940 arm_displaced_step_closure *dsc, int rd)
6941 {
6942 /* ADR Rd, #imm
6943
6944 Rewrite as:
6945
6946 Preparation: Rd <- PC
6947 Insn: ADD Rd, #imm
6948 Cleanup: Null.
6949 */
6950
6951 /* Rd <- PC */
6952 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6953 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6954 }
6955
6956 static int
6957 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6958 arm_displaced_step_closure *dsc,
6959 int rd, unsigned int imm)
6960 {
6961
6962 /* Encoding T2: ADDS Rd, #imm */
6963 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6964
6965 install_pc_relative (gdbarch, regs, dsc, rd);
6966
6967 return 0;
6968 }
6969
6970 static int
6971 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6972 struct regcache *regs,
6973 arm_displaced_step_closure *dsc)
6974 {
6975 unsigned int rd = bits (insn, 8, 10);
6976 unsigned int imm8 = bits (insn, 0, 7);
6977
6978 if (debug_displaced)
6979 fprintf_unfiltered (gdb_stdlog,
6980 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6981 rd, imm8, insn);
6982
6983 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6984 }
6985
6986 static int
6987 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6988 uint16_t insn2, struct regcache *regs,
6989 arm_displaced_step_closure *dsc)
6990 {
6991 unsigned int rd = bits (insn2, 8, 11);
6992 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
6993 extract the raw immediate encoding rather than computing the immediate
6994 value. When generating the ADD or SUB instruction below, we can then
6995 simply OR the immediate bits into place. */
6996 unsigned int imm_3_8 = insn2 & 0x70ff;
6997 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6998
6999 if (debug_displaced)
7000 fprintf_unfiltered (gdb_stdlog,
7001 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7002 rd, imm_i, imm_3_8, insn1, insn2);
7003
7004 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
7005 {
7006 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
7007 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7008 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7009 }
7010 else /* ADR encoding T3 (add form). */
7011 {
7012 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
7013 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7014 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7015 }
7016 dsc->numinsns = 2;
7017
7018 install_pc_relative (gdbarch, regs, dsc, rd);
7019
7020 return 0;
7021 }
7022
7023 static int
7024 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7025 struct regcache *regs,
7026 arm_displaced_step_closure *dsc)
7027 {
7028 unsigned int rt = bits (insn1, 8, 10);
7029 unsigned int pc;
7030 int imm8 = (bits (insn1, 0, 7) << 2);
7031
7032 /* LDR Rt, [PC, #imm8]
7033
7034 Rewrite as:
7035
7036 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7037
7038 Insn: LDR R0, [R2, R3];
7039 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7040
7041 if (debug_displaced)
7042 fprintf_unfiltered (gdb_stdlog,
7043 "displaced: copying thumb ldr r%d [pc #%d]\n"
7044 , rt, imm8);
7045
7046 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7047 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7048 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7049 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7050 /* The assembler calculates the required value of the offset from the
7051 Align(PC,4) value of this instruction to the label. */
7052 pc = pc & 0xfffffffc;
7053
7054 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7055 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7056
7057 dsc->rd = rt;
7058 dsc->u.ldst.xfersize = 4;
7059 dsc->u.ldst.rn = 0;
7060 dsc->u.ldst.immed = 0;
7061 dsc->u.ldst.writeback = 0;
7062 dsc->u.ldst.restore_r4 = 0;
7063
7064 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7065
7066 dsc->cleanup = &cleanup_load;
7067
7068 return 0;
7069 }
7070
7071 /* Copy Thumb cbnz/cbz instruction. */
7072
7073 static int
7074 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7075 struct regcache *regs,
7076 arm_displaced_step_closure *dsc)
7077 {
7078 int non_zero = bit (insn1, 11);
7079 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
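/* IMM5 above is really the zero-extended branch offset i:imm5:'0'; the
   branch target is the address of this cbz/cbnz plus 4 plus that offset.  */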
7080 CORE_ADDR from = dsc->insn_addr;
7081 int rn = bits (insn1, 0, 2);
7082 int rn_val = displaced_read_reg (regs, dsc, rn);
7083
7084 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7085 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7086 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7087 otherwise leave it false and cleanup_branch will do nothing. */
7088 if (dsc->u.branch.cond)
7089 {
7090 dsc->u.branch.cond = INST_AL;
7091 dsc->u.branch.dest = from + 4 + imm5;
7092 }
7093 else
7094 dsc->u.branch.dest = from + 2;
7095
7096 dsc->u.branch.link = 0;
7097 dsc->u.branch.exchange = 0;
7098
7099 if (debug_displaced)
7100 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7101 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7102 rn, rn_val, insn1, dsc->u.branch.dest);
7103
7104 dsc->modinsn[0] = THUMB_NOP;
7105
7106 dsc->cleanup = &cleanup_branch;
7107 return 0;
7108 }
7109
7110 /* Copy Table Branch Byte/Halfword (TBB/TBH). */
7111 static int
7112 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7113 uint16_t insn2, struct regcache *regs,
7114 arm_displaced_step_closure *dsc)
7115 {
7116 ULONGEST rn_val, rm_val;
7117 int is_tbh = bit (insn2, 4);
7118 CORE_ADDR halfwords = 0;
7119 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7120
7121 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7122 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7123
7124 if (is_tbh)
7125 {
7126 gdb_byte buf[2];
7127
7128 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7129 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7130 }
7131 else
7132 {
7133 gdb_byte buf[1];
7134
7135 target_read_memory (rn_val + rm_val, buf, 1);
7136 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7137 }
7138
7139 if (debug_displaced)
7140 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7141 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7142 (unsigned int) rn_val, (unsigned int) rm_val,
7143 (unsigned int) halfwords);
7144
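/* The branch target is the address of the tbb/tbh instruction plus 4 (the
   Thumb PC offset) plus twice the zero-extended table entry.  */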
7145 dsc->u.branch.cond = INST_AL;
7146 dsc->u.branch.link = 0;
7147 dsc->u.branch.exchange = 0;
7148 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7149
7150 dsc->cleanup = &cleanup_branch;
7151
7152 return 0;
7153 }
7154
7155 static void
7156 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7157 arm_displaced_step_closure *dsc)
7158 {
7159 /* PC <- r7 */
7160 int val = displaced_read_reg (regs, dsc, 7);
7161 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7162
7163 /* r7 <- r8 */
7164 val = displaced_read_reg (regs, dsc, 8);
7165 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7166
7167 /* r8 <- tmp[0] */
7168 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7169
7170 }
7171
7172 static int
7173 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7174 struct regcache *regs,
7175 arm_displaced_step_closure *dsc)
7176 {
7177 dsc->u.block.regmask = insn1 & 0x00ff;
7178
7179 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7180 to one of the following, depending on the register list:
7181
7182 (1) register list is full, that is, r0-r7 are used.
7183 Prepare: tmp[0] <- r8
7184
7185 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7186 MOV r8, r7; Move value of r7 to r8;
7187 POP {r7}; Store PC value into r7.
7188
7189 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7190
7191 (2) register list is not full, supposing there are N registers in
7192 register list (except PC, 0 <= N <= 7).
7193      Prepare: for each i in 0 .. N, tmp[i] <- ri.
7194
7195 POP {r0, r1, ...., rN};
7196
7197      Cleanup: Set the registers in the original reglist from r0 - rN, then
7198      restore r0 - rN from tmp[].
7199 */
7200 if (debug_displaced)
7201 fprintf_unfiltered (gdb_stdlog,
7202 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7203 dsc->u.block.regmask, insn1);
7204
7205 if (dsc->u.block.regmask == 0xff)
7206 {
7207 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7208
7209 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7210 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7211 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7212
7213 dsc->numinsns = 3;
7214 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7215 }
7216 else
7217 {
7218 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7219 unsigned int i;
7220 unsigned int new_regmask;
7221
7222 for (i = 0; i < num_in_list + 1; i++)
7223 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7224
7225 new_regmask = (1 << (num_in_list + 1)) - 1;
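      /* For example, for POP {r2, r5, pc} NUM_IN_LIST is 2, so the
         modified instruction becomes POP {r0, r1, r2}: r0 and r1 receive
         the values destined for r2 and r5, and r2 receives the value
         destined for the PC.  cleanup_block_load_pc then moves those
         values into the registers named in the original list and restores
         r0 - r2 from tmp[].  */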
7226
7227 if (debug_displaced)
7228 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7229 "{..., pc}: original reg list %.4x,"
7230 " modified list %.4x\n"),
7231 (int) dsc->u.block.regmask, new_regmask);
7232
7233 dsc->u.block.regmask |= 0x8000;
7234 dsc->u.block.writeback = 0;
7235 dsc->u.block.cond = INST_AL;
7236
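      /* Keep the opcode bits of the original POP, but clear bit 8 (the
         PC bit) and the low eight register bits, substituting the
         contiguous list r0 - rN computed above.  */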
7237 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7238
7239 dsc->cleanup = &cleanup_block_load_pc;
7240 }
7241
7242 return 0;
7243 }
7244
7245 static void
7246 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7247 struct regcache *regs,
7248 arm_displaced_step_closure *dsc)
7249 {
7250 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7251 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7252 int err = 0;
7253
7254 /* 16-bit thumb instructions. */
7255 switch (op_bit_12_15)
7256 {
7257     /* Shift (immediate), add, subtract, move and compare.  */
7258 case 0: case 1: case 2: case 3:
7259 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7260 "shift/add/sub/mov/cmp",
7261 dsc);
7262 break;
7263 case 4:
7264 switch (op_bit_10_11)
7265 {
7266 case 0: /* Data-processing */
7267 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7268 "data-processing",
7269 dsc);
7270 break;
7271 case 1: /* Special data instructions and branch and exchange. */
7272 {
7273 unsigned short op = bits (insn1, 7, 9);
7274 if (op == 6 || op == 7) /* BX or BLX */
7275 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7276 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7277 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7278 else
7279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7280 dsc);
7281 }
7282 break;
7283 default: /* LDR (literal) */
7284 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7285 }
7286 break;
7287 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7288 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7289 break;
7290 case 10:
7291 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7292 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7293 else /* Generate SP-relative address */
7294 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7295 break;
7296 case 11: /* Misc 16-bit instructions */
7297 {
7298 switch (bits (insn1, 8, 11))
7299 {
7300 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7301 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7302 break;
7303 case 12: case 13: /* POP */
7304 if (bit (insn1, 8)) /* PC is in register list. */
7305 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7306 else
7307 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7308 break;
7309 case 15: /* If-Then, and hints */
7310 if (bits (insn1, 0, 3))
7311	    /* If-Then makes up to four following instructions conditional.
7312	       The IT instruction itself is not conditional, so handle it as
7313	       an ordinary unmodified instruction.  */
7314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7315 dsc);
7316 else
7317 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7318 break;
7319 default:
7320 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7321 }
7322 }
7323 break;
7324 case 12:
7325 if (op_bit_10_11 < 2) /* Store multiple registers */
7326 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7327 else /* Load multiple registers */
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7329 break;
7330 case 13: /* Conditional branch and supervisor call */
7331 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7332 err = thumb_copy_b (gdbarch, insn1, dsc);
7333 else
7334 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7335 break;
7336 case 14: /* Unconditional branch */
7337 err = thumb_copy_b (gdbarch, insn1, dsc);
7338 break;
7339 default:
7340 err = 1;
7341 }
7342
7343 if (err)
7344 internal_error (__FILE__, __LINE__,
7345 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7346 }
7347
7348 static int
7349 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7350 uint16_t insn1, uint16_t insn2,
7351 struct regcache *regs,
7352 arm_displaced_step_closure *dsc)
7353 {
7354 int rt = bits (insn2, 12, 15);
7355 int rn = bits (insn1, 0, 3);
7356 int op1 = bits (insn1, 7, 8);
7357
7358 switch (bits (insn1, 5, 6))
7359 {
7360 case 0: /* Load byte and memory hints */
7361 if (rt == 0xf) /* PLD/PLI */
7362 {
7363 if (rn == 0xf)
7364 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7365 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7366 else
7367 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7368 "pli/pld", dsc);
7369 }
7370 else
7371 {
7372 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7374 1);
7375 else
7376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7377 "ldrb{reg, immediate}/ldrbt",
7378 dsc);
7379 }
7380
7381 break;
7382 case 1: /* Load halfword and memory hints. */
7383 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7384 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7385 "pld/unalloc memhint", dsc);
7386 else
7387 {
7388 if (rn == 0xf)
7389 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7390 2);
7391 else
7392 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7393 "ldrh/ldrht", dsc);
7394 }
7395 break;
7396 case 2: /* Load word */
7397 {
7398 int insn2_bit_8_11 = bits (insn2, 8, 11);
7399
7400 if (rn == 0xf)
7401 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7402 else if (op1 == 0x1) /* Encoding T3 */
7403 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7404 0, 1);
7405 else /* op1 == 0x0 */
7406 {
7407 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7408 /* LDR (immediate) */
7409 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7410 dsc, bit (insn2, 8), 1);
7411 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7412 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7413 "ldrt", dsc);
7414 else
7415 /* LDR (register) */
7416 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7417 dsc, 0, 0);
7418 }
7419 break;
7420 }
7421 default:
7422 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7423 break;
7424 }
7425 return 0;
7426 }
7427
7428 static void
7429 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7430 uint16_t insn2, struct regcache *regs,
7431 arm_displaced_step_closure *dsc)
7432 {
7433 int err = 0;
7434 unsigned short op = bit (insn2, 15);
7435 unsigned int op1 = bits (insn1, 11, 12);
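  /* OP1 selects the major 32-bit Thumb encoding group: 1 covers
     load/store multiple, load/store dual/exclusive, table branch,
     data-processing (shifted register) and coprocessor instructions;
     2 covers data-processing (immediate) and, when bit 15 of the second
     halfword is set, branches and miscellaneous control; 3 covers
     load/store single data items, NEON element/structure load/store,
     data-processing (register), multiply and coprocessor instructions.  */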
7436
7437 switch (op1)
7438 {
7439 case 1:
7440 {
7441 switch (bits (insn1, 9, 10))
7442 {
7443 case 0:
7444 if (bit (insn1, 6))
7445 {
7446 /* Load/store {dual, exclusive}, table branch. */
7447 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7448 && bits (insn2, 5, 7) == 0)
7449 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7450 dsc);
7451 else
7452	      /* The PC is not allowed to be used in load/store {dual,
7453		 exclusive} instructions.  */
7454 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7455 "load/store dual/ex", dsc);
7456 }
7457 else /* load/store multiple */
7458 {
7459 switch (bits (insn1, 7, 8))
7460 {
7461 case 0: case 3: /* SRS, RFE */
7462 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7463 "srs/rfe", dsc);
7464 break;
7465 case 1: case 2: /* LDM/STM/PUSH/POP */
7466 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7467 break;
7468 }
7469 }
7470 break;
7471
7472 case 1:
7473 /* Data-processing (shift register). */
7474 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7475 dsc);
7476 break;
7477 default: /* Coprocessor instructions. */
7478 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7479 break;
7480 }
7481 break;
7482 }
7483 case 2: /* op1 = 2 */
7484 if (op) /* Branch and misc control. */
7485 {
7486 if (bit (insn2, 14) /* BLX/BL */
7487 || bit (insn2, 12) /* Unconditional branch */
7488 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7489 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7490 else
7491 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7492 "misc ctrl", dsc);
7493 }
7494 else
7495 {
7496 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7497 {
7498 int dp_op = bits (insn1, 4, 8);
7499 int rn = bits (insn1, 0, 3);
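	      /* dp_op 0 is ADD (wide immediate) and dp_op 0xa is SUB
		 (wide immediate); with Rn == PC these are the ADR forms,
		 which read the PC and therefore need fixing up here.  */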
7500 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7501 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7502 regs, dsc);
7503 else
7504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 "dp/pb", dsc);
7506 }
7507 else /* Data processing (modified immediate) */
7508 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7509 "dp/mi", dsc);
7510 }
7511 break;
7512 case 3: /* op1 = 3 */
7513 switch (bits (insn1, 9, 10))
7514 {
7515 case 0:
7516 if (bit (insn1, 4))
7517 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7518 regs, dsc);
7519 else /* NEON Load/Store and Store single data item */
7520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7521 "neon elt/struct load/store",
7522 dsc);
7523 break;
7524 case 1: /* op1 = 3, bits (9, 10) == 1 */
7525 switch (bits (insn1, 7, 8))
7526 {
7527 case 0: case 1: /* Data processing (register) */
7528 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7529 "dp(reg)", dsc);
7530 break;
7531 case 2: /* Multiply and absolute difference */
7532 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7533 "mul/mua/diff", dsc);
7534 break;
7535 case 3: /* Long multiply and divide */
7536 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7537 "lmul/lmua", dsc);
7538 break;
7539 }
7540 break;
7541 default: /* Coprocessor instructions */
7542 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7543 break;
7544 }
7545 break;
7546 default:
7547 err = 1;
7548 }
7549
7550 if (err)
7551 internal_error (__FILE__, __LINE__,
7552 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7553
7554 }
7555
7556 static void
7557 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7558 struct regcache *regs,
7559 arm_displaced_step_closure *dsc)
7560 {
7561 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7562 uint16_t insn1
7563 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7564
7565 if (debug_displaced)
7566 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7567 "at %.8lx\n", insn1, (unsigned long) from);
7568
7569 dsc->is_thumb = 1;
7570 dsc->insn_size = thumb_insn_size (insn1);
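  /* A Thumb instruction is 32 bits long when the top five bits of its
     first halfword are 0b11101, 0b11110 or 0b11111; thumb_insn_size
     performs that check.  */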
7571 if (thumb_insn_size (insn1) == 4)
7572 {
7573 uint16_t insn2
7574 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7575 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7576 }
7577 else
7578 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7579 }
7580
7581 void
7582 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7583 CORE_ADDR to, struct regcache *regs,
7584 arm_displaced_step_closure *dsc)
7585 {
7586 int err = 0;
7587 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7588 uint32_t insn;
7589
7590 /* Most displaced instructions use a 1-instruction scratch space, so set this
7591 here and override below if/when necessary. */
7592 dsc->numinsns = 1;
7593 dsc->insn_addr = from;
7594 dsc->scratch_base = to;
7595 dsc->cleanup = NULL;
7596 dsc->wrote_to_pc = 0;
7597
7598 if (!displaced_in_arm_mode (regs))
7599 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7600
7601 dsc->is_thumb = 0;
7602 dsc->insn_size = 4;
7603 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7604 if (debug_displaced)
7605 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7606 "at %.8lx\n", (unsigned long) insn,
7607 (unsigned long) from);
7608
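  /* Dispatch on the top-level ARM encoding: a condition field of 0xf
     selects the unconditional instruction space; otherwise the key below
     combines bits 27:25 with bit 4 of the instruction, mirroring the
     main decoding table.  */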
7609 if ((insn & 0xf0000000) == 0xf0000000)
7610 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7611 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7612 {
7613 case 0x0: case 0x1: case 0x2: case 0x3:
7614 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7615 break;
7616
7617 case 0x4: case 0x5: case 0x6:
7618 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7619 break;
7620
7621 case 0x7:
7622 err = arm_decode_media (gdbarch, insn, dsc);
7623 break;
7624
7625 case 0x8: case 0x9: case 0xa: case 0xb:
7626 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7627 break;
7628
7629 case 0xc: case 0xd: case 0xe: case 0xf:
7630 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7631 break;
7632 }
7633
7634 if (err)
7635 internal_error (__FILE__, __LINE__,
7636 _("arm_process_displaced_insn: Instruction decode error"));
7637 }
7638
7639 /* Actually set up the scratch space for a displaced instruction. */
7640
7641 void
7642 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7643 CORE_ADDR to, arm_displaced_step_closure *dsc)
7644 {
7645 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7646 unsigned int i, len, offset;
7647 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7648   int size = dsc->is_thumb ? 2 : 4;
7649 const gdb_byte *bkp_insn;
7650
7651 offset = 0;
7652 /* Poke modified instruction(s). */
7653 for (i = 0; i < dsc->numinsns; i++)
7654 {
7655 if (debug_displaced)
7656 {
7657 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7658 if (size == 4)
7659 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7660 dsc->modinsn[i]);
7661 else if (size == 2)
7662 fprintf_unfiltered (gdb_stdlog, "%.4x",
7663 (unsigned short)dsc->modinsn[i]);
7664
7665 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7666 (unsigned long) to + offset);
7667
7668 }
7669 write_memory_unsigned_integer (to + offset, size,
7670 byte_order_for_code,
7671 dsc->modinsn[i]);
7672 offset += size;
7673 }
7674
7675 /* Choose the correct breakpoint instruction. */
7676 if (dsc->is_thumb)
7677 {
7678 bkp_insn = tdep->thumb_breakpoint;
7679 len = tdep->thumb_breakpoint_size;
7680 }
7681 else
7682 {
7683 bkp_insn = tdep->arm_breakpoint;
7684 len = tdep->arm_breakpoint_size;
7685 }
7686
7687 /* Put breakpoint afterwards. */
7688 write_memory (to + offset, bkp_insn, len);
7689
7690 if (debug_displaced)
7691 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7692 paddress (gdbarch, from), paddress (gdbarch, to));
7693 }
7694
7695 /* Entry point for cleaning things up after a displaced instruction has been
7696 single-stepped. */
7697
7698 void
7699 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7700 struct displaced_step_closure *dsc_,
7701 CORE_ADDR from, CORE_ADDR to,
7702 struct regcache *regs)
7703 {
7704 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7705
7706 if (dsc->cleanup)
7707 dsc->cleanup (gdbarch, regs, dsc);
7708
7709 if (!dsc->wrote_to_pc)
7710 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7711 dsc->insn_addr + dsc->insn_size);
7712
7713 }
7714
7715 #include "bfd-in2.h"
7716 #include "libcoff.h"
7717
7718 static int
7719 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7720 {
7721 gdb_disassembler *di
7722 = static_cast<gdb_disassembler *>(info->application_data);
7723 struct gdbarch *gdbarch = di->arch ();
7724
7725 if (arm_pc_is_thumb (gdbarch, memaddr))
7726 {
7727 static asymbol *asym;
7728 static combined_entry_type ce;
7729 static struct coff_symbol_struct csym;
7730 static struct bfd fake_bfd;
7731 static bfd_target fake_target;
7732
7733 if (csym.native == NULL)
7734 {
7735 /* Create a fake symbol vector containing a Thumb symbol.
7736 This is solely so that the code in print_insn_little_arm()
7737 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7738 the presence of a Thumb symbol and switch to decoding
7739 Thumb instructions. */
7740
7741 fake_target.flavour = bfd_target_coff_flavour;
7742 fake_bfd.xvec = &fake_target;
7743 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7744 csym.native = &ce;
7745 csym.symbol.the_bfd = &fake_bfd;
7746 csym.symbol.name = "fake";
7747 asym = (asymbol *) & csym;
7748 }
7749
7750 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7751 info->symbols = &asym;
7752 }
7753 else
7754 info->symbols = NULL;
7755
7756   /* GDB can get bfd_mach from the exec_bfd, so info->mach is accurate;
7757      mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise
7758      opcodes/arm-dis.c:print_insn resets info->mach, which would trigger
7759      the assert on the mismatch between info->mach and
7760      bfd_get_mach (current_program_space->exec_bfd ()) in
7761      default_print_insn.  */
7762 if (current_program_space->exec_bfd () != NULL)
7763 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7764
7765 return default_print_insn (memaddr, info);
7766 }
7767
7768 /* The following define instruction sequences that will cause ARM
7769    CPUs to take an undefined instruction trap.  These are used to
7770    signal a breakpoint to GDB.
7771
7772    The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7773    modes.  A different instruction is required for each mode.  The ARM
7774    CPUs can also be big or little endian.  Thus four different
7775    instructions are needed to support all cases.
7776
7777    Note: ARMv4 defines several new instructions that will take the
7778    undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
7779    not in fact add the new instructions.  The new undefined
7780    instructions in ARMv4 are all instructions that had no defined
7781    behaviour in earlier chips.  There is no guarantee that they will
7782    raise an exception; they may instead be treated as NOPs.  In
7783    practice, it may only be safe to rely on instructions matching:
7784
7785 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7786 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7787 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7788
7789    Even this may only be true if the condition predicate is true.  The
7790    following use a condition predicate of ALWAYS, so it is always TRUE.
7791
7792    There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
7793    and NetBSD all use a software interrupt rather than an undefined
7794    instruction to force a trap.  This can be handled by the
7795    ABI-specific code during establishment of the gdbarch vector.  */
7796
7797 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7798 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7799 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7800 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
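/* Both ARM patterns above encode the word 0xe7ffdefe, which lies in the
   undefined space described by the mask shown earlier; the Thumb pattern
   is the halfword 0xbebe, which reads the same in either byte order
   because the two bytes are equal.  */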
7801
7802 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7803 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7804 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7805 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7806
7807 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7808
7809 static int
7810 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7811 {
7812 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7813 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7814
7815 if (arm_pc_is_thumb (gdbarch, *pcptr))
7816 {
7817 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7818
7819 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7820 check whether we are replacing a 32-bit instruction. */
7821 if (tdep->thumb2_breakpoint != NULL)
7822 {
7823 gdb_byte buf[2];
7824
7825 if (target_read_memory (*pcptr, buf, 2) == 0)
7826 {
7827 unsigned short inst1;
7828
7829 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7830 if (thumb_insn_size (inst1) == 4)
7831 return ARM_BP_KIND_THUMB2;
7832 }
7833 }
7834
7835 return ARM_BP_KIND_THUMB;
7836 }
7837 else
7838 return ARM_BP_KIND_ARM;
7839
7840 }
7841
7842 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7843
7844 static const gdb_byte *
7845 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7846 {
7847 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7848
7849 switch (kind)
7850 {
7851 case ARM_BP_KIND_ARM:
7852 *size = tdep->arm_breakpoint_size;
7853 return tdep->arm_breakpoint;
7854 case ARM_BP_KIND_THUMB:
7855 *size = tdep->thumb_breakpoint_size;
7856 return tdep->thumb_breakpoint;
7857 case ARM_BP_KIND_THUMB2:
7858 *size = tdep->thumb2_breakpoint_size;
7859 return tdep->thumb2_breakpoint;
7860 default:
7861 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7862 }
7863 }
7864
7865 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7866
7867 static int
7868 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7869 struct regcache *regcache,
7870 CORE_ADDR *pcptr)
7871 {
7872 gdb_byte buf[4];
7873
7874   /* Check that the memory pointed to by PC is readable.  */
7875 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7876 {
7877 struct arm_get_next_pcs next_pcs_ctx;
7878
7879 arm_get_next_pcs_ctor (&next_pcs_ctx,
7880 &arm_get_next_pcs_ops,
7881 gdbarch_byte_order (gdbarch),
7882 gdbarch_byte_order_for_code (gdbarch),
7883 0,
7884 regcache);
7885
7886 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7887
7888	  /* If one of the addresses computed by software single-step matches
7889	     *PCPTR, use that destination address to determine whether the
7890	     target location is in Thumb mode.  */
7891 for (CORE_ADDR pc : next_pcs)
7892 {
7893 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7894 {
7895 if (IS_THUMB_ADDR (pc))
7896 {
7897 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7898 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7899 }
7900 else
7901 return ARM_BP_KIND_ARM;
7902 }
7903 }
7904 }
7905
7906 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7907 }
7908
7909 /* Extract from an array REGBUF containing the (raw) register state a
7910 function return value of type TYPE, and copy that, in virtual
7911 format, into VALBUF. */
7912
7913 static void
7914 arm_extract_return_value (struct type *type, struct regcache *regs,
7915 gdb_byte *valbuf)
7916 {
7917 struct gdbarch *gdbarch = regs->arch ();
7918 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7919
7920 if (TYPE_CODE_FLT == type->code ())
7921 {
7922 switch (gdbarch_tdep (gdbarch)->fp_model)
7923 {
7924 case ARM_FLOAT_FPA:
7925 {
7926 /* The value is in register F0 in internal format. We need to
7927 extract the raw value and then convert it to the desired
7928 internal type. */
7929 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7930
7931 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7932 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7933 valbuf, type);
7934 }
7935 break;
7936
7937 case ARM_FLOAT_SOFT_FPA:
7938 case ARM_FLOAT_SOFT_VFP:
7939 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7940 not using the VFP ABI code. */
7941 case ARM_FLOAT_VFP:
7942 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7943 if (TYPE_LENGTH (type) > 4)
7944 regs->cooked_read (ARM_A1_REGNUM + 1,
7945 valbuf + ARM_INT_REGISTER_SIZE);
7946 break;
7947
7948 default:
7949 internal_error (__FILE__, __LINE__,
7950 _("arm_extract_return_value: "
7951 "Floating point model not supported"));
7952 break;
7953 }
7954 }
7955 else if (type->code () == TYPE_CODE_INT
7956 || type->code () == TYPE_CODE_CHAR
7957 || type->code () == TYPE_CODE_BOOL
7958 || type->code () == TYPE_CODE_PTR
7959 || TYPE_IS_REFERENCE (type)
7960 || type->code () == TYPE_CODE_ENUM)
7961 {
7962       /* If the type is a plain integer, then the access is
7963	  straightforward.  Otherwise we have to work a little
7964	  harder.  */
7965 int len = TYPE_LENGTH (type);
7966 int regno = ARM_A1_REGNUM;
7967 ULONGEST tmp;
7968
7969 while (len > 0)
7970 {
7971 /* By using store_unsigned_integer we avoid having to do
7972 anything special for small big-endian values. */
7973 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7974 store_unsigned_integer (valbuf,
7975 (len > ARM_INT_REGISTER_SIZE
7976 ? ARM_INT_REGISTER_SIZE : len),
7977 byte_order, tmp);
7978 len -= ARM_INT_REGISTER_SIZE;
7979 valbuf += ARM_INT_REGISTER_SIZE;
7980 }
7981 }
7982 else
7983 {
7984 /* For a structure or union the behaviour is as if the value had
7985 been stored to word-aligned memory and then loaded into
7986 registers with 32-bit load instruction(s). */
7987 int len = TYPE_LENGTH (type);
7988 int regno = ARM_A1_REGNUM;
7989 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7990
7991 while (len > 0)
7992 {
7993 regs->cooked_read (regno++, tmpbuf);
7994 memcpy (valbuf, tmpbuf,
7995 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7996 len -= ARM_INT_REGISTER_SIZE;
7997 valbuf += ARM_INT_REGISTER_SIZE;
7998 }
7999 }
8000 }
8001
8002
8003 /* Will a function return an aggregate type in memory or in a
8004 register? Return 0 if an aggregate type can be returned in a
8005 register, 1 if it must be returned in memory. */
8006
8007 static int
8008 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8009 {
8010 enum type_code code;
8011
8012 type = check_typedef (type);
8013
8014   /* Simple, non-aggregate types (i.e. not including vectors and
8015      complex) are always returned in a register (or registers).  */
8016 code = type->code ();
8017 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8018 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8019 return 0;
8020
8021 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8022 {
8023 /* Vector values should be returned using ARM registers if they
8024 are not over 16 bytes. */
8025 return (TYPE_LENGTH (type) > 16);
8026 }
8027
8028 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8029 {
8030 /* The AAPCS says all aggregates not larger than a word are returned
8031 in a register. */
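      /* For example, a 4-byte struct { char c; short s; } comes back in
         r0, whereas an 8-byte structure is returned in memory.  */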
8032 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8033 return 0;
8034
8035 return 1;
8036 }
8037 else
8038 {
8039 int nRc;
8040
8041 /* All aggregate types that won't fit in a register must be returned
8042 in memory. */
8043 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8044 return 1;
8045
8046 /* In the ARM ABI, "integer" like aggregate types are returned in
8047 registers. For an aggregate type to be integer like, its size
8048 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8049 offset of each addressable subfield must be zero. Note that bit
8050 fields are not addressable, and all addressable subfields of
8051 unions always start at offset zero.
8052
8053 This function is based on the behaviour of GCC 2.95.1.
8054 See: gcc/arm.c: arm_return_in_memory() for details.
8055
8056 Note: All versions of GCC before GCC 2.95.2 do not set up the
8057 parameters correctly for a function returning the following
8058 structure: struct { float f;}; This should be returned in memory,
8059 not a register. Richard Earnshaw sent me a patch, but I do not
8060 know of any way to detect if a function like the above has been
8061 compiled with the correct calling convention. */
8062
8063 /* Assume all other aggregate types can be returned in a register.
8064 Run a check for structures, unions and arrays. */
8065 nRc = 0;
8066
8067 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8068 {
8069 int i;
8070 /* Need to check if this struct/union is "integer" like. For
8071 this to be true, its size must be less than or equal to
8072 ARM_INT_REGISTER_SIZE and the offset of each addressable
8073 subfield must be zero. Note that bit fields are not
8074 addressable, and unions always start at offset zero. If any
8075 of the subfields is a floating point type, the struct/union
8076 cannot be an integer type. */
8077
8078 /* For each field in the object, check:
8079 1) Is it FP? --> yes, nRc = 1;
8080 2) Is it addressable (bitpos != 0) and
8081 not packed (bitsize == 0)?
8082 --> yes, nRc = 1
8083 */
8084
8085 for (i = 0; i < type->num_fields (); i++)
8086 {
8087 enum type_code field_type_code;
8088
8089 field_type_code
8090 = check_typedef (type->field (i).type ())->code ();
8091
8092 /* Is it a floating point type field? */
8093 if (field_type_code == TYPE_CODE_FLT)
8094 {
8095 nRc = 1;
8096 break;
8097 }
8098
8099 /* If bitpos != 0, then we have to care about it. */
8100 if (TYPE_FIELD_BITPOS (type, i) != 0)
8101 {
8102		  /* Bit fields are not addressable.  If the field bitsize is
8103		     zero, then the field is not packed, so this is an ordinary
8104		     addressable subfield at a non-zero offset and the type is
8105		     not integer-like.  */
8105 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8106 {
8107 nRc = 1;
8108 break;
8109 }
8110 }
8111 }
8112 }
8113
8114 return nRc;
8115 }
8116 }
8117
8118 /* Write into appropriate registers a function return value of type
8119 TYPE, given in virtual format. */
8120
8121 static void
8122 arm_store_return_value (struct type *type, struct regcache *regs,
8123 const gdb_byte *valbuf)
8124 {
8125 struct gdbarch *gdbarch = regs->arch ();
8126 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8127
8128 if (type->code () == TYPE_CODE_FLT)
8129 {
8130 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8131
8132 switch (gdbarch_tdep (gdbarch)->fp_model)
8133 {
8134 case ARM_FLOAT_FPA:
8135
8136 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8137 regs->cooked_write (ARM_F0_REGNUM, buf);
8138 break;
8139
8140 case ARM_FLOAT_SOFT_FPA:
8141 case ARM_FLOAT_SOFT_VFP:
8142 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8143 not using the VFP ABI code. */
8144 case ARM_FLOAT_VFP:
8145 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8146 if (TYPE_LENGTH (type) > 4)
8147 regs->cooked_write (ARM_A1_REGNUM + 1,
8148 valbuf + ARM_INT_REGISTER_SIZE);
8149 break;
8150
8151 default:
8152 internal_error (__FILE__, __LINE__,
8153 _("arm_store_return_value: Floating "
8154 "point model not supported"));
8155 break;
8156 }
8157 }
8158 else if (type->code () == TYPE_CODE_INT
8159 || type->code () == TYPE_CODE_CHAR
8160 || type->code () == TYPE_CODE_BOOL
8161 || type->code () == TYPE_CODE_PTR
8162 || TYPE_IS_REFERENCE (type)
8163 || type->code () == TYPE_CODE_ENUM)
8164 {
8165 if (TYPE_LENGTH (type) <= 4)
8166 {
8167 /* Values of one word or less are zero/sign-extended and
8168 returned in r0. */
8169 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8170 LONGEST val = unpack_long (type, valbuf);
8171
8172 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8173 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8174 }
8175 else
8176 {
8177	  /* Integral values greater than one word are stored in consecutive
8178	     registers starting with r0.  This will always be a multiple of
8179	     the register size.  */
8180 int len = TYPE_LENGTH (type);
8181 int regno = ARM_A1_REGNUM;
8182
8183 while (len > 0)
8184 {
8185 regs->cooked_write (regno++, valbuf);
8186 len -= ARM_INT_REGISTER_SIZE;
8187 valbuf += ARM_INT_REGISTER_SIZE;
8188 }
8189 }
8190 }
8191 else
8192 {
8193 /* For a structure or union the behaviour is as if the value had
8194 been stored to word-aligned memory and then loaded into
8195 registers with 32-bit load instruction(s). */
8196 int len = TYPE_LENGTH (type);
8197 int regno = ARM_A1_REGNUM;
8198 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8199
8200 while (len > 0)
8201 {
8202 memcpy (tmpbuf, valbuf,
8203 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8204 regs->cooked_write (regno++, tmpbuf);
8205 len -= ARM_INT_REGISTER_SIZE;
8206 valbuf += ARM_INT_REGISTER_SIZE;
8207 }
8208 }
8209 }
8210
8211
8212 /* Handle function return values. */
8213
8214 static enum return_value_convention
8215 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8216 struct type *valtype, struct regcache *regcache,
8217 gdb_byte *readbuf, const gdb_byte *writebuf)
8218 {
8219 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8220 struct type *func_type = function ? value_type (function) : NULL;
8221 enum arm_vfp_cprc_base_type vfp_base_type;
8222 int vfp_base_count;
8223
8224 if (arm_vfp_abi_for_function (gdbarch, func_type)
8225 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8226 {
8227 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8228 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8229 int i;
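      /* The value is returned in consecutive VFP registers of the base
         type: for example, a homogeneous aggregate of three floats comes
         back in s0 - s2 and one of two doubles in d0 - d1; quad base
         types use the NEON helpers below.  */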
8230 for (i = 0; i < vfp_base_count; i++)
8231 {
8232 if (reg_char == 'q')
8233 {
8234 if (writebuf)
8235 arm_neon_quad_write (gdbarch, regcache, i,
8236 writebuf + i * unit_length);
8237
8238 if (readbuf)
8239 arm_neon_quad_read (gdbarch, regcache, i,
8240 readbuf + i * unit_length);
8241 }
8242 else
8243 {
8244 char name_buf[4];
8245 int regnum;
8246
8247 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8248 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8249 strlen (name_buf));
8250 if (writebuf)
8251 regcache->cooked_write (regnum, writebuf + i * unit_length);
8252 if (readbuf)
8253 regcache->cooked_read (regnum, readbuf + i * unit_length);
8254 }
8255 }
8256 return RETURN_VALUE_REGISTER_CONVENTION;
8257 }
8258
8259 if (valtype->code () == TYPE_CODE_STRUCT
8260 || valtype->code () == TYPE_CODE_UNION
8261 || valtype->code () == TYPE_CODE_ARRAY)
8262 {
8263 if (tdep->struct_return == pcc_struct_return
8264 || arm_return_in_memory (gdbarch, valtype))
8265 return RETURN_VALUE_STRUCT_CONVENTION;
8266 }
8267 else if (valtype->code () == TYPE_CODE_COMPLEX)
8268 {
8269 if (arm_return_in_memory (gdbarch, valtype))
8270 return RETURN_VALUE_STRUCT_CONVENTION;
8271 }
8272
8273 if (writebuf)
8274 arm_store_return_value (valtype, regcache, writebuf);
8275
8276 if (readbuf)
8277 arm_extract_return_value (valtype, regcache, readbuf);
8278
8279 return RETURN_VALUE_REGISTER_CONVENTION;
8280 }
8281
8282
8283 static int
8284 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8285 {
8286 struct gdbarch *gdbarch = get_frame_arch (frame);
8287 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8288 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8289 CORE_ADDR jb_addr;
8290 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8291
8292 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8293
8294 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8295 ARM_INT_REGISTER_SIZE))
8296 return 0;
8297
8298 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8299 return 1;
8300 }
8301 /* A call to the CMSE secure entry function "foo", shown at "a", is
8302    rewritten by GNU ld as shown at "b".
8303 a) bl xxxx <foo>
8304
8305 <foo>
8306 xxxx:
8307
8308 b) bl yyyy <__acle_se_foo>
8309
8310 section .gnu.sgstubs:
8311 <foo>
8312 yyyy: sg // secure gateway
8313 b.w xxxx <__acle_se_foo> // original_branch_dest
8314
8315 <__acle_se_foo>
8316 xxxx:
8317
8318    When control is at "b", the PC contains "yyyy" (the sg address), which
8319    is a trampoline and does not exist in the source code.  This function
8320    returns the target PC "xxxx".  For more details please refer to section 5.4
8321 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8322 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8323 document on www.developer.arm.com. */
8324
8325 static CORE_ADDR
8326 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8327 {
8328 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8329 char *target_name = (char *) alloca (target_len);
8330 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8331
8332 struct bound_minimal_symbol minsym
8333 = lookup_minimal_symbol (target_name, NULL, objfile);
8334
8335 if (minsym.minsym != nullptr)
8336 return BMSYMBOL_VALUE_ADDRESS (minsym);
8337
8338 return 0;
8339 }
8340
8341 /* Return true when SEC points to ".gnu.sgstubs" section. */
8342
8343 static bool
8344 arm_is_sgstubs_section (struct obj_section *sec)
8345 {
8346 return (sec != nullptr
8347 && sec->the_bfd_section != nullptr
8348 && sec->the_bfd_section->name != nullptr
8349 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8350 }
8351
8352 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8353 return the target PC. Otherwise return 0. */
8354
8355 CORE_ADDR
8356 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8357 {
8358 const char *name;
8359 int namelen;
8360 CORE_ADDR start_addr;
8361
8362 /* Find the starting address and name of the function containing the PC. */
8363 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8364 {
8365       /* The trampoline 'bx reg' doesn't belong to any function.  Do the
8366	  check here.  */
8367 start_addr = arm_skip_bx_reg (frame, pc);
8368 if (start_addr != 0)
8369 return start_addr;
8370
8371 return 0;
8372 }
8373
8374 /* If PC is in a Thumb call or return stub, return the address of the
8375 target PC, which is in a register. The thunk functions are called
8376      _call_via_xx, where xx is the register name.  The possible names
8377 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8378 functions, named __ARM_call_via_r[0-7]. */
8379 if (startswith (name, "_call_via_")
8380 || startswith (name, "__ARM_call_via_"))
8381 {
8382 /* Use the name suffix to determine which register contains the
8383 target PC. */
8384 static const char *table[15] =
8385 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8386 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8387 };
8388 int regno;
8389 int offset = strlen (name) - 2;
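      /* Every register name in the table is two characters long, so the
         last two characters of the stub name select the register,
         e.g. "_call_via_ip" -> "ip".  */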
8390
8391 for (regno = 0; regno <= 14; regno++)
8392 if (strcmp (&name[offset], table[regno]) == 0)
8393 return get_frame_register_unsigned (frame, regno);
8394 }
8395
8396 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8397 non-interworking calls to foo. We could decode the stubs
8398 to find the target but it's easier to use the symbol table. */
8399 namelen = strlen (name);
8400 if (name[0] == '_' && name[1] == '_'
8401 && ((namelen > 2 + strlen ("_from_thumb")
8402 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8403 || (namelen > 2 + strlen ("_from_arm")
8404 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8405 {
8406 char *target_name;
8407 int target_len = namelen - 2;
8408 struct bound_minimal_symbol minsym;
8409 struct objfile *objfile;
8410 struct obj_section *sec;
8411
8412 if (name[namelen - 1] == 'b')
8413 target_len -= strlen ("_from_thumb");
8414 else
8415 target_len -= strlen ("_from_arm");
8416
8417 target_name = (char *) alloca (target_len + 1);
8418 memcpy (target_name, name + 2, target_len);
8419 target_name[target_len] = '\0';
8420
8421 sec = find_pc_section (pc);
8422 objfile = (sec == NULL) ? NULL : sec->objfile;
8423 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8424 if (minsym.minsym != NULL)
8425 return BMSYMBOL_VALUE_ADDRESS (minsym);
8426 else
8427 return 0;
8428 }
8429
8430 struct obj_section *section = find_pc_section (pc);
8431
8432 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8433 if (arm_is_sgstubs_section (section))
8434 return arm_skip_cmse_entry (pc, name, section->objfile);
8435
8436 return 0; /* not a stub */
8437 }
8438
8439 static void
8440 arm_update_current_architecture (void)
8441 {
8442 struct gdbarch_info info;
8443
8444 /* If the current architecture is not ARM, we have nothing to do. */
8445 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8446 return;
8447
8448 /* Update the architecture. */
8449 gdbarch_info_init (&info);
8450
8451 if (!gdbarch_update_p (info))
8452 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8453 }
8454
8455 static void
8456 set_fp_model_sfunc (const char *args, int from_tty,
8457 struct cmd_list_element *c)
8458 {
8459 int fp_model;
8460
8461 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8462 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8463 {
8464 arm_fp_model = (enum arm_float_model) fp_model;
8465 break;
8466 }
8467
8468 if (fp_model == ARM_FLOAT_LAST)
8469 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8470 current_fp_model);
8471
8472 arm_update_current_architecture ();
8473 }
8474
8475 static void
8476 show_fp_model (struct ui_file *file, int from_tty,
8477 struct cmd_list_element *c, const char *value)
8478 {
8479 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8480
8481 if (arm_fp_model == ARM_FLOAT_AUTO
8482 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8483 fprintf_filtered (file, _("\
8484 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8485 fp_model_strings[tdep->fp_model]);
8486 else
8487 fprintf_filtered (file, _("\
8488 The current ARM floating point model is \"%s\".\n"),
8489 fp_model_strings[arm_fp_model]);
8490 }
8491
8492 static void
8493 arm_set_abi (const char *args, int from_tty,
8494 struct cmd_list_element *c)
8495 {
8496 int arm_abi;
8497
8498 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8499 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8500 {
8501 arm_abi_global = (enum arm_abi_kind) arm_abi;
8502 break;
8503 }
8504
8505 if (arm_abi == ARM_ABI_LAST)
8506 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8507 arm_abi_string);
8508
8509 arm_update_current_architecture ();
8510 }
8511
8512 static void
8513 arm_show_abi (struct ui_file *file, int from_tty,
8514 struct cmd_list_element *c, const char *value)
8515 {
8516 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8517
8518 if (arm_abi_global == ARM_ABI_AUTO
8519 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8520 fprintf_filtered (file, _("\
8521 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8522 arm_abi_strings[tdep->arm_abi]);
8523 else
8524 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8525 arm_abi_string);
8526 }
8527
8528 static void
8529 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8530 struct cmd_list_element *c, const char *value)
8531 {
8532 fprintf_filtered (file,
8533 _("The current execution mode assumed "
8534 "(when symbols are unavailable) is \"%s\".\n"),
8535 arm_fallback_mode_string);
8536 }
8537
8538 static void
8539 arm_show_force_mode (struct ui_file *file, int from_tty,
8540 struct cmd_list_element *c, const char *value)
8541 {
8542 fprintf_filtered (file,
8543 _("The current execution mode assumed "
8544 "(even when symbols are available) is \"%s\".\n"),
8545 arm_force_mode_string);
8546 }
8547
8548 /* If the user changes the register disassembly style used for info
8549 register and other commands, we have to also switch the style used
8550    in opcodes for disassembly output.  This function is run by the "set
8551    arm disassembly" command, and does that.  */
8552
8553 static void
8554 set_disassembly_style_sfunc (const char *args, int from_tty,
8555 struct cmd_list_element *c)
8556 {
8557   /* Convert the short style name into the long style name (e.g. reg-names-*)
8558 before calling the generic set_disassembler_options() function. */
8559 std::string long_name = std::string ("reg-names-") + disassembly_style;
8560 set_disassembler_options (&long_name[0]);
8561 }
8562
8563 static void
8564 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8565 struct cmd_list_element *c, const char *value)
8566 {
8567 struct gdbarch *gdbarch = get_current_arch ();
8568 char *options = get_disassembler_options (gdbarch);
8569 const char *style = "";
8570 int len = 0;
8571 const char *opt;
8572
8573 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8574 if (CONST_STRNEQ (opt, "reg-names-"))
8575 {
8576 style = &opt[strlen ("reg-names-")];
8577 len = strcspn (style, ",");
8578 }
8579
8580 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8581 }
8582 \f
8583 /* Return the ARM register name corresponding to register I. */
8584 static const char *
8585 arm_register_name (struct gdbarch *gdbarch, int i)
8586 {
8587 const int num_regs = gdbarch_num_regs (gdbarch);
8588
8589 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8590 && i >= num_regs && i < num_regs + 32)
8591 {
8592 static const char *const vfp_pseudo_names[] = {
8593 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8594 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8595 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8596 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8597 };
8598
8599 return vfp_pseudo_names[i - num_regs];
8600 }
8601
8602 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8603 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8604 {
8605 static const char *const neon_pseudo_names[] = {
8606 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8607 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8608 };
8609
8610 return neon_pseudo_names[i - num_regs - 32];
8611 }
8612
8613 if (i >= ARRAY_SIZE (arm_register_names))
8614 /* These registers are only supported on targets which supply
8615 an XML description. */
8616 return "";
8617
8618 return arm_register_names[i];
8619 }
8620
8621 /* Test whether the coff symbol specific value corresponds to a Thumb
8622 function. */
8623
8624 static int
8625 coff_sym_is_thumb (int val)
8626 {
8627 return (val == C_THUMBEXT
8628 || val == C_THUMBSTAT
8629 || val == C_THUMBEXTFUNC
8630 || val == C_THUMBSTATFUNC
8631 || val == C_THUMBLABEL);
8632 }
8633
8634 /* arm_coff_make_msymbol_special()
8635 arm_elf_make_msymbol_special()
8636
8637 These functions test whether the COFF or ELF symbol corresponds to
8638 an address in thumb code, and set a "special" bit in a minimal
8639 symbol to indicate that it does. */
8640
8641 static void
8642 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8643 {
8644 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8645
8646 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8647 == ST_BRANCH_TO_THUMB)
8648 MSYMBOL_SET_SPECIAL (msym);
8649 }
8650
8651 static void
8652 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8653 {
8654 if (coff_sym_is_thumb (val))
8655 MSYMBOL_SET_SPECIAL (msym);
8656 }
8657
8658 static void
8659 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8660 asymbol *sym)
8661 {
8662 const char *name = bfd_asymbol_name (sym);
8663 struct arm_per_bfd *data;
8664 struct arm_mapping_symbol new_map_sym;
8665
8666 gdb_assert (name[0] == '$');
8667 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8668 return;
8669
8670 data = arm_bfd_data_key.get (objfile->obfd);
8671 if (data == NULL)
8672 data = arm_bfd_data_key.emplace (objfile->obfd,
8673 objfile->obfd->section_count);
8674 arm_mapping_symbol_vec &map
8675 = data->section_maps[bfd_asymbol_section (sym)->index];
8676
8677 new_map_sym.value = sym->value;
8678 new_map_sym.type = name[1];
8679
8680   /* Insert at the end; the vector will be sorted on first use.  */
8681 map.push_back (new_map_sym);
8682 }
8683
8684 static void
8685 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8686 {
8687 struct gdbarch *gdbarch = regcache->arch ();
8688 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8689
8690 /* If necessary, set the T bit. */
8691 if (arm_apcs_32)
8692 {
8693 ULONGEST val, t_bit;
8694 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8695 t_bit = arm_psr_thumb_bit (gdbarch);
8696 if (arm_pc_is_thumb (gdbarch, pc))
8697 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8698 val | t_bit);
8699 else
8700 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8701 val & ~t_bit);
8702 }
8703 }
8704
8705 /* Read the contents of a NEON quad register, by reading from two
8706 double registers. This is used to implement the quad pseudo
8707 registers, and for argument passing in case the quad registers are
8708 missing; vectors are passed in quad registers when using the VFP
8709 ABI, even if a NEON unit is not present. REGNUM is the index of
8710 the quad register, in [0, 15]. */
8711
8712 static enum register_status
8713 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8714 int regnum, gdb_byte *buf)
8715 {
8716 char name_buf[4];
8717 gdb_byte reg_buf[8];
8718 int offset, double_regnum;
8719 enum register_status status;
8720
8721 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8722 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8723 strlen (name_buf));
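  /* Quad register qN overlays double registers d(2N) (the least
     significant half) and d(2N + 1), hence the REGNUM << 1 mapping
     above.  */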
8724
8725 /* d0 is always the least significant half of q0. */
8726 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8727 offset = 8;
8728 else
8729 offset = 0;
8730
8731 status = regcache->raw_read (double_regnum, reg_buf);
8732 if (status != REG_VALID)
8733 return status;
8734 memcpy (buf + offset, reg_buf, 8);
8735
8736 offset = 8 - offset;
8737 status = regcache->raw_read (double_regnum + 1, reg_buf);
8738 if (status != REG_VALID)
8739 return status;
8740 memcpy (buf + offset, reg_buf, 8);
8741
8742 return REG_VALID;
8743 }
8744
8745 static enum register_status
8746 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8747 int regnum, gdb_byte *buf)
8748 {
8749 const int num_regs = gdbarch_num_regs (gdbarch);
8750 char name_buf[4];
8751 gdb_byte reg_buf[8];
8752 int offset, double_regnum;
8753
8754 gdb_assert (regnum >= num_regs);
8755 regnum -= num_regs;
8756
8757 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8758 /* Quad-precision register. */
8759 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8760 else
8761 {
8762 enum register_status status;
8763
8764 /* Single-precision register. */
8765 gdb_assert (regnum < 32);
8766
8767 /* s0 is always the least significant half of d0. */
8768 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8769 offset = (regnum & 1) ? 0 : 4;
8770 else
8771 offset = (regnum & 1) ? 4 : 0;
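      /* Single register s(2K) occupies the least significant half of
         d(K) and s(2K + 1) the most significant half; for example, s5 is
         the most significant half of d2.  */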
8772
8773 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8774 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8775 strlen (name_buf));
8776
8777 status = regcache->raw_read (double_regnum, reg_buf);
8778 if (status == REG_VALID)
8779 memcpy (buf, reg_buf + offset, 4);
8780 return status;
8781 }
8782 }
8783
8784 /* Store the contents of BUF to a NEON quad register, by writing to
8785 two double registers. This is used to implement the quad pseudo
8786 registers, and for argument passing in case the quad registers are
8787 missing; vectors are passed in quad registers when using the VFP
8788 ABI, even if a NEON unit is not present. REGNUM is the index
8789 of the quad register, in [0, 15]. */
8790
8791 static void
8792 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8793 int regnum, const gdb_byte *buf)
8794 {
8795 char name_buf[4];
8796 int offset, double_regnum;
8797
8798 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8799 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8800 strlen (name_buf));
8801
8802 /* d0 is always the least significant half of q0. */
8803 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8804 offset = 8;
8805 else
8806 offset = 0;
8807
8808 regcache->raw_write (double_regnum, buf + offset);
8809 offset = 8 - offset;
8810 regcache->raw_write (double_regnum + 1, buf + offset);
8811 }
8812
8813 static void
8814 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8815 int regnum, const gdb_byte *buf)
8816 {
8817 const int num_regs = gdbarch_num_regs (gdbarch);
8818 char name_buf[4];
8819 gdb_byte reg_buf[8];
8820 int offset, double_regnum;
8821
8822 gdb_assert (regnum >= num_regs);
8823 regnum -= num_regs;
8824
8825 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8826 /* Quad-precision register. */
8827 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8828 else
8829 {
8830 /* Single-precision register. */
8831 gdb_assert (regnum < 32);
8832
8833 /* s0 is always the least significant half of d0. */
8834 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8835 offset = (regnum & 1) ? 0 : 4;
8836 else
8837 offset = (regnum & 1) ? 4 : 0;
8838
8839 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8840 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8841 strlen (name_buf));
8842
8843 regcache->raw_read (double_regnum, reg_buf);
8844 memcpy (reg_buf + offset, buf, 4);
8845 regcache->raw_write (double_regnum, reg_buf);
8846 }
8847 }
8848
8849 static struct value *
8850 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8851 {
8852 const int *reg_p = (const int *) baton;
8853 return value_of_register (*reg_p, frame);
8854 }
8855 \f
8856 static enum gdb_osabi
8857 arm_elf_osabi_sniffer (bfd *abfd)
8858 {
8859 unsigned int elfosabi;
8860 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8861
8862 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8863
8864 if (elfosabi == ELFOSABI_ARM)
8865 /* GNU tools use this value. Check note sections in this case,
8866 as well. */
8867 {
8868 for (asection *sect : gdb_bfd_sections (abfd))
8869 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8870 }
8871
8872 /* Anything else will be handled by the generic ELF sniffer. */
8873 return osabi;
8874 }
8875
8876 static int
8877 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8878 struct reggroup *group)
8879 {
8880   /* The FPS register's type is INT, but it belongs to float_reggroup.
8881      Besides this, the FPS register belongs to save_reggroup,
8882      restore_reggroup, and all_reggroup, of course.  */
8883 if (regnum == ARM_FPS_REGNUM)
8884 return (group == float_reggroup
8885 || group == save_reggroup
8886 || group == restore_reggroup
8887 || group == all_reggroup);
8888 else
8889 return default_register_reggroup_p (gdbarch, regnum, group);
8890 }
8891
8892 /* For backward-compatibility we allow two 'g' packet lengths with
8893 the remote protocol depending on whether FPA registers are
8894 supplied. M-profile targets do not have FPA registers, but some
8895 stubs already exist in the wild which use a 'g' packet which
8896 supplies them albeit with dummy values. The packet format which
8897 includes FPA registers should be considered deprecated for
8898 M-profile targets. */
8899
8900 static void
8901 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8902 {
8903 if (gdbarch_tdep (gdbarch)->is_m)
8904 {
8905 const target_desc *tdesc;
8906
8907 /* If we know from the executable this is an M-profile target,
8908 cater for remote targets whose register set layout is the
8909 same as the FPA layout. */
8910 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8911 register_remote_g_packet_guess (gdbarch,
8912 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8913 tdesc);
8914
8915 /* The regular M-profile layout. */
8916 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8917 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8918 tdesc);
8919
8920 /* M-profile plus M4F VFP. */
8921 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8922 register_remote_g_packet_guess (gdbarch,
8923 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8924 tdesc);
8925 }
8926
8927 /* Otherwise we don't have a useful guess. */
8928 }
8929
8930 /* Implement the code_of_frame_writable gdbarch method. */
8931
8932 static int
8933 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8934 {
8935 if (gdbarch_tdep (gdbarch)->is_m
8936 && get_frame_type (frame) == SIGTRAMP_FRAME)
8937 {
8938       /* M-profile exception frames return to some magic PCs, which
8939	  aren't writable at all.  */
8940 return 0;
8941 }
8942 else
8943 return 1;
8944 }
8945
8946 /* Implement gdbarch_gnu_triplet_regexp.  If the arch name is arm then allow
8947    it to be suffixed by a version (e.g. armv7hl).  */
8948
8949 static const char *
8950 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8951 {
8952 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8953 return "arm(v[^- ]*)?";
8954 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8955 }
8956
8957 /* Initialize the current architecture based on INFO. If possible,
8958 re-use an architecture from ARCHES, which is a list of
8959 architectures already created during this debugging session.
8960
8961 Called e.g. at program startup, when reading a core file, and when
8962 reading a binary file. */
8963
8964 static struct gdbarch *
8965 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8966 {
8967 struct gdbarch_tdep *tdep;
8968 struct gdbarch *gdbarch;
8969 struct gdbarch_list *best_arch;
8970 enum arm_abi_kind arm_abi = arm_abi_global;
8971 enum arm_float_model fp_model = arm_fp_model;
8972 tdesc_arch_data_up tdesc_data;
8973 int i;
8974 bool is_m = false;
8975 int vfp_register_count = 0;
8976 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8977 bool have_wmmx_registers = false;
8978 bool have_neon = false;
8979 bool have_fpa_registers = true;
8980 const struct target_desc *tdesc = info.target_desc;
8981
8982 /* If we have an object to base this architecture on, try to determine
8983 its ABI. */
8984
8985 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8986 {
8987 int ei_osabi, e_flags;
8988
8989 switch (bfd_get_flavour (info.abfd))
8990 {
8991 case bfd_target_coff_flavour:
8992 /* Assume it's an old APCS-style ABI. */
8993 /* XXX WinCE? */
8994 arm_abi = ARM_ABI_APCS;
8995 break;
8996
8997 case bfd_target_elf_flavour:
8998 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8999 e_flags = elf_elfheader (info.abfd)->e_flags;
9000
9001 if (ei_osabi == ELFOSABI_ARM)
9002 {
9003 /* GNU tools used to use this value, but do not for EABI
9004 objects. There's nowhere to tag an EABI version
9005 anyway, so assume APCS. */
9006 arm_abi = ARM_ABI_APCS;
9007 }
9008 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9009 {
9010 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9011
9012 switch (eabi_ver)
9013 {
9014 case EF_ARM_EABI_UNKNOWN:
9015 /* Assume GNU tools. */
9016 arm_abi = ARM_ABI_APCS;
9017 break;
9018
9019 case EF_ARM_EABI_VER4:
9020 case EF_ARM_EABI_VER5:
9021 arm_abi = ARM_ABI_AAPCS;
9022 /* EABI binaries default to VFP float ordering.
9023 They may also contain build attributes that can
9024 be used to identify if the VFP argument-passing
9025 ABI is in use. */
9026 if (fp_model == ARM_FLOAT_AUTO)
9027 {
9028 #ifdef HAVE_ELF
9029 switch (bfd_elf_get_obj_attr_int (info.abfd,
9030 OBJ_ATTR_PROC,
9031 Tag_ABI_VFP_args))
9032 {
9033 case AEABI_VFP_args_base:
9034 /* "The user intended FP parameter/result
9035 passing to conform to AAPCS, base
9036 variant". */
9037 fp_model = ARM_FLOAT_SOFT_VFP;
9038 break;
9039 case AEABI_VFP_args_vfp:
9040 /* "The user intended FP parameter/result
9041 passing to conform to AAPCS, VFP
9042 variant". */
9043 fp_model = ARM_FLOAT_VFP;
9044 break;
9045 case AEABI_VFP_args_toolchain:
9046 /* "The user intended FP parameter/result
9047 passing to conform to tool chain-specific
9048 conventions" - we don't know any such
9049 conventions, so leave it as "auto". */
9050 break;
9051 case AEABI_VFP_args_compatible:
9052 /* "Code is compatible with both the base
9053 and VFP variants; the user did not permit
9054 non-variadic functions to pass FP
9055 parameters/results" - leave it as
9056 "auto". */
9057 break;
9058 default:
9059 /* Attribute value not mentioned in the
9060 November 2012 ABI, so leave it as
9061 "auto". */
9062 break;
9063 }
9064 #else
9065 fp_model = ARM_FLOAT_SOFT_VFP;
9066 #endif
9067 }
9068 break;
9069
9070 default:
9071 /* Leave it as "auto". */
9072 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9073 break;
9074 }
9075
9076 #ifdef HAVE_ELF
9077 /* Detect M-profile programs. This only works if the
9078 executable file includes build attributes; GCC does
9079 copy them to the executable, but e.g. RealView does
9080 not. */
9081 int attr_arch
9082 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9083 Tag_CPU_arch);
9084 int attr_profile
9085 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9086 Tag_CPU_arch_profile);
9087
9088 /* GCC specifies the profile for v6-M; RealView only
9089 specifies the profile for architectures starting with
9090 V7 (as opposed to architectures with a tag
9091 numerically greater than TAG_CPU_ARCH_V7). */
9092 if (!tdesc_has_registers (tdesc)
9093 && (attr_arch == TAG_CPU_ARCH_V6_M
9094 || attr_arch == TAG_CPU_ARCH_V6S_M
9095 || attr_profile == 'M'))
9096 is_m = true;
9097 #endif
9098 }
9099
9100 if (fp_model == ARM_FLOAT_AUTO)
9101 {
9102 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9103 {
9104 case 0:
9105 /* Leave it as "auto". Strictly speaking this case
9106 means FPA, but almost nobody uses that now, and
9107 many toolchains fail to set the appropriate bits
9108 for the floating-point model they use. */
9109 break;
9110 case EF_ARM_SOFT_FLOAT:
9111 fp_model = ARM_FLOAT_SOFT_FPA;
9112 break;
9113 case EF_ARM_VFP_FLOAT:
9114 fp_model = ARM_FLOAT_VFP;
9115 break;
9116 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9117 fp_model = ARM_FLOAT_SOFT_VFP;
9118 break;
9119 }
9120 }
9121
9122 if (e_flags & EF_ARM_BE8)
9123 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9124
9125 break;
9126
9127 default:
9128 /* Leave it as "auto". */
9129 break;
9130 }
9131 }
9132
9133 /* Check any target description for validity. */
9134 if (tdesc_has_registers (tdesc))
9135 {
9136 /* For most registers we require GDB's default names; but also allow
9137 the numeric names for sp / lr / pc, as a convenience. */
9138 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9139 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9140 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9141
9142 const struct tdesc_feature *feature;
9143 int valid_p;
9144
9145 feature = tdesc_find_feature (tdesc,
9146 "org.gnu.gdb.arm.core");
9147 if (feature == NULL)
9148 {
9149 feature = tdesc_find_feature (tdesc,
9150 "org.gnu.gdb.arm.m-profile");
9151 if (feature == NULL)
9152 return NULL;
9153 else
9154 is_m = true;
9155 }
9156
9157 tdesc_data = tdesc_data_alloc ();
9158
9159 valid_p = 1;
9160 for (i = 0; i < ARM_SP_REGNUM; i++)
9161 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9162 arm_register_names[i]);
9163 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9164 ARM_SP_REGNUM,
9165 arm_sp_names);
9166 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9167 ARM_LR_REGNUM,
9168 arm_lr_names);
9169 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9170 ARM_PC_REGNUM,
9171 arm_pc_names);
9172 if (is_m)
9173 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9174 ARM_PS_REGNUM, "xpsr");
9175 else
9176 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9177 ARM_PS_REGNUM, "cpsr");
9178
9179 if (!valid_p)
9180 return NULL;
9181
9182 feature = tdesc_find_feature (tdesc,
9183 "org.gnu.gdb.arm.fpa");
9184 if (feature != NULL)
9185 {
9186 valid_p = 1;
9187 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9188 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9189 arm_register_names[i]);
9190 if (!valid_p)
9191 return NULL;
9192 }
9193 else
9194 have_fpa_registers = false;
9195
9196 feature = tdesc_find_feature (tdesc,
9197 "org.gnu.gdb.xscale.iwmmxt");
9198 if (feature != NULL)
9199 {
9200 static const char *const iwmmxt_names[] = {
9201 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9202 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9203 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9204 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9205 };
9206
9207 valid_p = 1;
9208 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9209 valid_p
9210 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9211 iwmmxt_names[i - ARM_WR0_REGNUM]);
9212
9213 /* Check for the control registers, but do not fail if they
9214 are missing. */
9215 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9216 tdesc_numbered_register (feature, tdesc_data.get (), i,
9217 iwmmxt_names[i - ARM_WR0_REGNUM]);
9218
9219 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9220 valid_p
9221 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9222 iwmmxt_names[i - ARM_WR0_REGNUM]);
9223
9224 if (!valid_p)
9225 return NULL;
9226
9227 have_wmmx_registers = true;
9228 }
9229
9230 /* If we have a VFP unit, check whether the single precision registers
9231 are present. If not, then we will synthesize them as pseudo
9232 registers. */
9233 feature = tdesc_find_feature (tdesc,
9234 "org.gnu.gdb.arm.vfp");
9235 if (feature != NULL)
9236 {
9237 static const char *const vfp_double_names[] = {
9238 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9239 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9240 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9241 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9242 };
9243
9244 /* Require the double precision registers. There must be either
9245 16 or 32. */
9246 valid_p = 1;
9247 for (i = 0; i < 32; i++)
9248 {
9249 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9250 ARM_D0_REGNUM + i,
9251 vfp_double_names[i]);
9252 if (!valid_p)
9253 break;
9254 }
9255 if (!valid_p && i == 16)
9256 valid_p = 1;
9257
9258 /* Also require FPSCR. */
9259 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9260 ARM_FPSCR_REGNUM, "fpscr");
9261 if (!valid_p)
9262 return NULL;
9263
9264 if (tdesc_unnumbered_register (feature, "s0") == 0)
9265 have_vfp_pseudos = true;
9266
9267 vfp_register_count = i;
9268
9269 /* If we have VFP, also check for NEON. The architecture allows
9270 NEON without VFP (integer vector operations only), but GDB
9271 does not support that. */
9272 feature = tdesc_find_feature (tdesc,
9273 "org.gnu.gdb.arm.neon");
9274 if (feature != NULL)
9275 {
9276 /* NEON requires 32 double-precision registers. */
9277 if (i != 32)
9278 return NULL;
9279
9280 /* If there are quad registers defined by the stub, use
9281 their type; otherwise (normally) provide them with
9282 the default type. */
9283 if (tdesc_unnumbered_register (feature, "q0") == 0)
9284 have_neon_pseudos = true;
9285
9286 have_neon = true;
9287 }
9288 }
9289 }
9290
9291 /* If there is already a candidate, use it. */
9292 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9293 best_arch != NULL;
9294 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9295 {
9296 if (arm_abi != ARM_ABI_AUTO
9297 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9298 continue;
9299
9300 if (fp_model != ARM_FLOAT_AUTO
9301 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9302 continue;
9303
9304 /* There are various other properties in tdep that we do not
9305 need to check here: those derived from a target description,
9306 since gdbarches with a different target description are
9307 automatically disqualified. */
9308
9309 /* Do check is_m, though, since it might come from the binary. */
9310 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9311 continue;
9312
9313 /* Found a match. */
9314 break;
9315 }
9316
9317 if (best_arch != NULL)
9318 return best_arch->gdbarch;
9319
9320 tdep = XCNEW (struct gdbarch_tdep);
9321 gdbarch = gdbarch_alloc (&info, tdep);
9322
9323 /* Record additional information about the architecture we are defining.
9324 These are gdbarch discriminators, like the OSABI. */
9325 tdep->arm_abi = arm_abi;
9326 tdep->fp_model = fp_model;
9327 tdep->is_m = is_m;
9328 tdep->have_fpa_registers = have_fpa_registers;
9329 tdep->have_wmmx_registers = have_wmmx_registers;
9330 gdb_assert (vfp_register_count == 0
9331 || vfp_register_count == 16
9332 || vfp_register_count == 32);
9333 tdep->vfp_register_count = vfp_register_count;
9334 tdep->have_vfp_pseudos = have_vfp_pseudos;
9335 tdep->have_neon_pseudos = have_neon_pseudos;
9336 tdep->have_neon = have_neon;
9337
9338 arm_register_g_packet_guesses (gdbarch);
9339
9340 /* Breakpoints. */
9341 switch (info.byte_order_for_code)
9342 {
9343 case BFD_ENDIAN_BIG:
9344 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9345 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9346 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9347 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9348
9349 break;
9350
9351 case BFD_ENDIAN_LITTLE:
9352 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9353 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9354 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9355 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9356
9357 break;
9358
9359 default:
9360 internal_error (__FILE__, __LINE__,
9361 _("arm_gdbarch_init: bad byte order for float format"));
9362 }
9363
9364 /* On ARM targets char defaults to unsigned. */
9365 set_gdbarch_char_signed (gdbarch, 0);
9366
9367 /* wchar_t is unsigned under the AAPCS. */
9368 if (tdep->arm_abi == ARM_ABI_AAPCS)
9369 set_gdbarch_wchar_signed (gdbarch, 0);
9370 else
9371 set_gdbarch_wchar_signed (gdbarch, 1);
9372
9373 /* Compute type alignment. */
9374 set_gdbarch_type_align (gdbarch, arm_type_align);
9375
9376 /* Note: for displaced stepping, this includes the breakpoint, and one word
9377 of additional scratch space. This setting isn't used for anything besides
9378 displaced stepping at present. */
9379 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9380
9381 /* This should be low enough for everything. */
9382 tdep->lowest_pc = 0x20;
9383 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9384
9385 /* The default, for both APCS and AAPCS, is to return small
9386 structures in registers. */
9387 tdep->struct_return = reg_struct_return;
9388
9389 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9390 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9391
9392 if (is_m)
9393 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9394
9395 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9396
9397 frame_base_set_default (gdbarch, &arm_normal_base);
9398
9399 /* Address manipulation. */
9400 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9401
9402 /* Advance PC across function entry code. */
9403 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9404
9405 /* Detect whether PC is at a point where the stack has been destroyed. */
9406 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9407
9408 /* Skip trampolines. */
9409 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9410
9411 /* The stack grows downward. */
9412 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9413
9414 /* Breakpoint manipulation. */
9415 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9416 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9417 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9418 arm_breakpoint_kind_from_current_state);
9419
9420 /* Information about registers, etc. */
9421 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9422 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9423 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9424 set_gdbarch_register_type (gdbarch, arm_register_type);
9425 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9426
9427 /* This "info float" is FPA-specific. Use the generic version if we
9428 do not have FPA. */
9429 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9430 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9431
9432 /* Internal <-> external register number maps. */
9433 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9434 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9435
9436 set_gdbarch_register_name (gdbarch, arm_register_name);
9437
9438 /* Returning results. */
9439 set_gdbarch_return_value (gdbarch, arm_return_value);
9440
9441 /* Disassembly. */
9442 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9443
9444 /* Minsymbol frobbing. */
9445 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9446 set_gdbarch_coff_make_msymbol_special (gdbarch,
9447 arm_coff_make_msymbol_special);
9448 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9449
9450 /* Thumb-2 IT block support. */
9451 set_gdbarch_adjust_breakpoint_address (gdbarch,
9452 arm_adjust_breakpoint_address);
9453
9454 /* Virtual tables. */
9455 set_gdbarch_vbit_in_delta (gdbarch, 1);
9456
9457 /* Hook in the ABI-specific overrides, if they have been registered. */
9458 gdbarch_init_osabi (info, gdbarch);
9459
9460 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9461
9462 /* Add some default predicates. */
9463 if (is_m)
9464 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9465 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9466 dwarf2_append_unwinders (gdbarch);
9467 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9468 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9469 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9470
9471 /* Now we have tuned the configuration, set a few final things,
9472 based on what the OS ABI has told us. */
9473
9474 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9475 binaries are always marked. */
9476 if (tdep->arm_abi == ARM_ABI_AUTO)
9477 tdep->arm_abi = ARM_ABI_APCS;
9478
9479 /* Watchpoints are not steppable. */
9480 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9481
9482 /* We used to default to FPA for generic ARM, but almost nobody
9483 uses that now, and we now provide a way for the user to force
9484 the model. So default to the most useful variant. */
9485 if (tdep->fp_model == ARM_FLOAT_AUTO)
9486 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9487
9488 if (tdep->jb_pc >= 0)
9489 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9490
9491 /* Floating point sizes and format. */
9492 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9493 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9494 {
9495 set_gdbarch_double_format
9496 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9497 set_gdbarch_long_double_format
9498 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9499 }
9500 else
9501 {
9502 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9503 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9504 }
9505
9506 if (have_vfp_pseudos)
9507 {
9508 /* NOTE: These are the only pseudo registers used by
9509 the ARM target at the moment. If more are added, a
9510 little more care in numbering will be needed. */
9511
9512 int num_pseudos = 32;
9513 if (have_neon_pseudos)
9514 num_pseudos += 16;
9515 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9516 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9517 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9518 }
9519
9520 if (tdesc_data != nullptr)
9521 {
9522 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9523
9524 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9525
9526 /* Override tdesc_register_type to adjust the types of VFP
9527 registers for NEON. */
9528 set_gdbarch_register_type (gdbarch, arm_register_type);
9529 }
9530
9531 /* Add standard register aliases. We add aliases even for those
9532 names which are used by the current architecture - it's simpler,
9533 and does no harm, since nothing ever lists user registers. */
9534 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9535 user_reg_add (gdbarch, arm_register_aliases[i].name,
9536 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9537
9538 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9539 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9540
9541 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9542
9543 return gdbarch;
9544 }
9545
9546 static void
9547 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9548 {
9549 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9550
9551 if (tdep == NULL)
9552 return;
9553
9554 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9555 (int) tdep->fp_model);
9556 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9557 (int) tdep->have_fpa_registers);
9558 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9559 (int) tdep->have_wmmx_registers);
9560 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9561 (int) tdep->vfp_register_count);
9562 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9563 (int) tdep->have_vfp_pseudos);
9564 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9565 (int) tdep->have_neon_pseudos);
9566 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9567 (int) tdep->have_neon);
9568 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9569 (unsigned long) tdep->lowest_pc);
9570 }
9571
9572 #if GDB_SELF_TEST
9573 namespace selftests
9574 {
9575 static void arm_record_test (void);
9576 }
9577 #endif
9578
9579 void _initialize_arm_tdep ();
9580 void
9581 _initialize_arm_tdep ()
9582 {
9583 long length;
9584 int i, j;
9585 char regdesc[1024], *rdptr = regdesc;
9586 size_t rest = sizeof (regdesc);
9587
9588 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9589
9590 /* Add ourselves to objfile event chain. */
9591 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9592
9593 /* Register an ELF OS ABI sniffer for ARM binaries. */
9594 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9595 bfd_target_elf_flavour,
9596 arm_elf_osabi_sniffer);
9597
9598 /* Add root prefix command for all "set arm"/"show arm" commands. */
9599 add_basic_prefix_cmd ("arm", no_class,
9600 _("Various ARM-specific commands."),
9601 &setarmcmdlist, "set arm ", 0, &setlist);
9602
9603 add_show_prefix_cmd ("arm", no_class,
9604 _("Various ARM-specific commands."),
9605 &showarmcmdlist, "show arm ", 0, &showlist);
9606
9607
9608 arm_disassembler_options = xstrdup ("reg-names-std");
9609 const disasm_options_t *disasm_options
9610 = &disassembler_options_arm ()->options;
9611 int num_disassembly_styles = 0;
9612 for (i = 0; disasm_options->name[i] != NULL; i++)
9613 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9614 num_disassembly_styles++;
9615
9616 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9617 valid_disassembly_styles = XNEWVEC (const char *,
9618 num_disassembly_styles + 1);
9619 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9620 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9621 {
9622 size_t offset = strlen ("reg-names-");
9623 const char *style = disasm_options->name[i];
9624 valid_disassembly_styles[j++] = &style[offset];
9625 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9626 disasm_options->description[i]);
9627 rdptr += length;
9628 rest -= length;
9629 }
9630 /* Mark the end of valid options. */
9631 valid_disassembly_styles[num_disassembly_styles] = NULL;
9632
9633 /* Create the help text. */
9634 std::string helptext = string_printf ("%s%s%s",
9635 _("The valid values are:\n"),
9636 regdesc,
9637 _("The default is \"std\"."));
9638
9639 add_setshow_enum_cmd ("disassembler", no_class,
9640 valid_disassembly_styles, &disassembly_style,
9641 _("Set the disassembly style."),
9642 _("Show the disassembly style."),
9643 helptext.c_str (),
9644 set_disassembly_style_sfunc,
9645 show_disassembly_style_sfunc,
9646 &setarmcmdlist, &showarmcmdlist);
9647
9648 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9649 _("Set usage of ARM 32-bit mode."),
9650 _("Show usage of ARM 32-bit mode."),
9651 _("When off, a 26-bit PC will be used."),
9652 NULL,
9653 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9654 mode is %s. */
9655 &setarmcmdlist, &showarmcmdlist);
9656
9657 /* Add a command to allow the user to force the FPU model. */
9658 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9659 _("Set the floating point type."),
9660 _("Show the floating point type."),
9661 _("auto - Determine the FP typefrom the OS-ABI.\n\
9662 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9663 fpa - FPA co-processor (GCC compiled).\n\
9664 softvfp - Software FP with pure-endian doubles.\n\
9665 vfp - VFP co-processor."),
9666 set_fp_model_sfunc, show_fp_model,
9667 &setarmcmdlist, &showarmcmdlist);
9668
9669 /* Add a command to allow the user to force the ABI. */
9670 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9671 _("Set the ABI."),
9672 _("Show the ABI."),
9673 NULL, arm_set_abi, arm_show_abi,
9674 &setarmcmdlist, &showarmcmdlist);
9675
9676 /* Add two commands to allow the user to force the assumed
9677 execution mode. */
9678 add_setshow_enum_cmd ("fallback-mode", class_support,
9679 arm_mode_strings, &arm_fallback_mode_string,
9680 _("Set the mode assumed when symbols are unavailable."),
9681 _("Show the mode assumed when symbols are unavailable."),
9682 NULL, NULL, arm_show_fallback_mode,
9683 &setarmcmdlist, &showarmcmdlist);
9684 add_setshow_enum_cmd ("force-mode", class_support,
9685 arm_mode_strings, &arm_force_mode_string,
9686 _("Set the mode assumed even when symbols are available."),
9687 _("Show the mode assumed even when symbols are available."),
9688 NULL, NULL, arm_show_force_mode,
9689 &setarmcmdlist, &showarmcmdlist);
9690
9691 /* Debugging flag. */
9692 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9693 _("Set ARM debugging."),
9694 _("Show ARM debugging."),
9695 _("When on, arm-specific debugging is enabled."),
9696 NULL,
9697 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9698 &setdebuglist, &showdebuglist);
9699
9700 #if GDB_SELF_TEST
9701 selftests::register_test ("arm-record", selftests::arm_record_test);
9702 #endif
9703
9704 }
9705
9706 /* ARM-reversible process record data structures. */
9707
9708 #define ARM_INSN_SIZE_BYTES 4
9709 #define THUMB_INSN_SIZE_BYTES 2
9710 #define THUMB2_INSN_SIZE_BYTES 4
9711
9712
9713 /* Position of the bit within a 32-bit ARM instruction
9714 that defines whether the instruction is a load or store. */
9715 #define INSN_S_L_BIT_NUM 20
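
/* Illustrative sketch (editorial addition, not part of the original source):
   the record helpers below test this bit to tell loads (L == 1, LDR-class)
   from stores (L == 0, STR-class), along the lines of:

     if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
       record_destination_registers ();
     else
       record_target_memory ();

   where both helpers are hypothetical stand-ins for the code in
   arm_record_ld_st_imm_offset and friends.  */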
9716
9717 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9718 do \
9719 { \
9720 unsigned int reg_len = LENGTH; \
9721 if (reg_len) \
9722 { \
9723 REGS = XNEWVEC (uint32_t, reg_len); \
9724 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9725 } \
9726 } \
9727 while (0)
9728
9729 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9730 do \
9731 { \
9732 unsigned int mem_len = LENGTH; \
9733 if (mem_len) \
9734 { \
9735 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9736 memcpy(&MEMS->len, &RECORD_BUF[0], \
9737 sizeof(struct arm_mem_r) * LENGTH); \
9738 } \
9739 } \
9740 while (0)
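
/* Usage sketch (editorial addition, not part of the original source):
   a typical record helper collects the registers and memory ranges an insn
   will modify into local buffers and then hands them to the macros above,
   e.g. (using the names that appear throughout this file):

     uint32_t record_buf[8], record_buf_mem[8];

     record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
     record_buf[1] = ARM_PS_REGNUM;
     arm_insn_r->reg_rec_count = 2;

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);
*/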
9741
9742 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9743 #define INSN_RECORDED(ARM_RECORD) \
9744 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9745
9746 /* ARM memory record structure. */
9747 struct arm_mem_r
9748 {
9749 uint32_t len; /* Record length. */
9750 uint32_t addr; /* Memory address. */
9751 };
9752
9753 /* ARM instruction record contains opcode of current insn
9754 and execution state (before entry to decode_insn()),
9755 contains list of to-be-modified registers and
9756 memory blocks (on return from decode_insn()). */
9757
9758 typedef struct insn_decode_record_t
9759 {
9760 struct gdbarch *gdbarch;
9761 struct regcache *regcache;
9762 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9763 uint32_t arm_insn; /* Should accommodate thumb. */
9764 uint32_t cond; /* Condition code. */
9765 uint32_t opcode; /* Insn opcode. */
9766 uint32_t decode; /* Insn decode bits. */
9767 uint32_t mem_rec_count; /* No of mem records. */
9768 uint32_t reg_rec_count; /* No of reg records. */
9769 uint32_t *arm_regs; /* Registers to be saved for this record. */
9770 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9771 } insn_decode_record;
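
/* Lifecycle sketch (editorial addition, not part of the original source):
   before decoding, the caller fills in gdbarch, regcache, this_addr and
   arm_insn; a decode routine then extracts its own fields with bits (),
   for example

     arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
     arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

   and, on success, leaves reg_rec_count / mem_rec_count together with the
   arm_regs / arm_mems arrays describing everything the insn will modify.  */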
9772
9773
9774 /* Checks ARM SBZ and SBO mandatory fields. */
9775
9776 static int
9777 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9778 {
9779 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9780
9781 if (!len)
9782 return 1;
9783
9784 if (!sbo)
9785 ones = ~ones;
9786
9787 while (ones)
9788 {
9789 if (!(ones & sbo))
9790 {
9791 return 0;
9792 }
9793 ones = ones >> 1;
9794 }
9795 return 1;
9796 }
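
/* Worked example (editorial addition, not part of the original source):
   BIT_NUM is 1-based, so a call such as

     sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1)

   examines the 12-bit field starting at bit 8 (0-based), i.e. bits 8..19 of
   the insn; with SBO == 1 it enforces the "should be one" requirement on
   that field, and with SBO == 0 the same field is checked as "should be
   zero".  */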
9797
9798 enum arm_record_result
9799 {
9800 ARM_RECORD_SUCCESS = 0,
9801 ARM_RECORD_FAILURE = 1
9802 };
9803
9804 typedef enum
9805 {
9806 ARM_RECORD_STRH=1,
9807 ARM_RECORD_STRD
9808 } arm_record_strx_t;
9809
9810 typedef enum
9811 {
9812 ARM_RECORD=1,
9813 THUMB_RECORD,
9814 THUMB2_RECORD
9815 } record_type_t;
9816
9817
9818 static int
9819 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9820 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9821 {
9822
9823 struct regcache *reg_cache = arm_insn_r->regcache;
9824 ULONGEST u_regval[2]= {0};
9825
9826 uint32_t reg_src1 = 0, reg_src2 = 0;
9827 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9828
9829 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9830 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9831
9832 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9833 {
9834 /* 1) Handle misc store, immediate offset. */
9835 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9836 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9837 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9838 regcache_raw_read_unsigned (reg_cache, reg_src1,
9839 &u_regval[0]);
9840 if (ARM_PC_REGNUM == reg_src1)
9841 {
9842 /* If R15 was used as Rn, its value is the current PC+8. */
9843 u_regval[0] = u_regval[0] + 8;
9844 }
9845 offset_8 = (immed_high << 4) | immed_low;
9846 /* Calculate target store address. */
9847 if (14 == arm_insn_r->opcode)
9848 {
9849 tgt_mem_addr = u_regval[0] + offset_8;
9850 }
9851 else
9852 {
9853 tgt_mem_addr = u_regval[0] - offset_8;
9854 }
9855 if (ARM_RECORD_STRH == str_type)
9856 {
9857 record_buf_mem[0] = 2;
9858 record_buf_mem[1] = tgt_mem_addr;
9859 arm_insn_r->mem_rec_count = 1;
9860 }
9861 else if (ARM_RECORD_STRD == str_type)
9862 {
9863 record_buf_mem[0] = 4;
9864 record_buf_mem[1] = tgt_mem_addr;
9865 record_buf_mem[2] = 4;
9866 record_buf_mem[3] = tgt_mem_addr + 4;
9867 arm_insn_r->mem_rec_count = 2;
9868 }
9869 }
9870 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9871 {
9872 /* 2) Store, register offset. */
9873 /* Get Rm. */
9874 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9875 /* Get Rn. */
9876 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9877 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9878 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9879 if (15 == reg_src2)
9880 {
9881 /* If R15 was used as Rn, its value is the current PC+8. */
9882 u_regval[1] = u_regval[1] + 8;
9883 }
9884 /* Calculate target store address, Rn +/- Rm, register offset. */
9885 if (12 == arm_insn_r->opcode)
9886 {
9887 tgt_mem_addr = u_regval[0] + u_regval[1];
9888 }
9889 else
9890 {
9891 tgt_mem_addr = u_regval[1] - u_regval[0];
9892 }
9893 if (ARM_RECORD_STRH == str_type)
9894 {
9895 record_buf_mem[0] = 2;
9896 record_buf_mem[1] = tgt_mem_addr;
9897 arm_insn_r->mem_rec_count = 1;
9898 }
9899 else if (ARM_RECORD_STRD == str_type)
9900 {
9901 record_buf_mem[0] = 4;
9902 record_buf_mem[1] = tgt_mem_addr;
9903 record_buf_mem[2] = 4;
9904 record_buf_mem[3] = tgt_mem_addr + 4;
9905 arm_insn_r->mem_rec_count = 2;
9906 }
9907 }
9908 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9909 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9910 {
9911 /* 3) Store, immediate pre-indexed. */
9912 /* 5) Store, immediate post-indexed. */
9913 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9914 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9915 offset_8 = (immed_high << 4) | immed_low;
9916 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9917 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9918 /* Calculate target store address, Rn +/- immediate offset. */
9919 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9920 {
9921 tgt_mem_addr = u_regval[0] + offset_8;
9922 }
9923 else
9924 {
9925 tgt_mem_addr = u_regval[0] - offset_8;
9926 }
9927 if (ARM_RECORD_STRH == str_type)
9928 {
9929 record_buf_mem[0] = 2;
9930 record_buf_mem[1] = tgt_mem_addr;
9931 arm_insn_r->mem_rec_count = 1;
9932 }
9933 else if (ARM_RECORD_STRD == str_type)
9934 {
9935 record_buf_mem[0] = 4;
9936 record_buf_mem[1] = tgt_mem_addr;
9937 record_buf_mem[2] = 4;
9938 record_buf_mem[3] = tgt_mem_addr + 4;
9939 arm_insn_r->mem_rec_count = 2;
9940 }
9941 /* Record Rn also as it changes. */
9942 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9943 arm_insn_r->reg_rec_count = 1;
9944 }
9945 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9946 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9947 {
9948 /* 4) Store, register pre-indexed. */
9949 /* 6) Store, register post-indexed. */
9950 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9951 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9952 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9953 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9954 /* Calculate target store address, Rn +/- Rm, register offset. */
9955 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9956 {
9957 tgt_mem_addr = u_regval[0] + u_regval[1];
9958 }
9959 else
9960 {
9961 tgt_mem_addr = u_regval[1] - u_regval[0];
9962 }
9963 if (ARM_RECORD_STRH == str_type)
9964 {
9965 record_buf_mem[0] = 2;
9966 record_buf_mem[1] = tgt_mem_addr;
9967 arm_insn_r->mem_rec_count = 1;
9968 }
9969 else if (ARM_RECORD_STRD == str_type)
9970 {
9971 record_buf_mem[0] = 4;
9972 record_buf_mem[1] = tgt_mem_addr;
9973 record_buf_mem[2] = 4;
9974 record_buf_mem[3] = tgt_mem_addr + 4;
9975 arm_insn_r->mem_rec_count = 2;
9976 }
9977 /* Record Rn also as it changes. */
9978 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9979 arm_insn_r->reg_rec_count = 1;
9980 }
9981 return 0;
9982 }
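
/* Worked example (editorial addition, not part of the original source):
   for a misc store with immediate offset whose Rn is R15, the code above
   reads the PC, applies the architectural +8 pipeline offset, and then adds
   or subtracts the 8-bit immediate.  E.g. an STRH of the "add offset" form
   (opcode 14) recorded at address 0x8000 with an immediate of 0x10 notes
   2 bytes of memory at 0x8000 + 8 + 0x10 = 0x8018 in record_buf_mem.  */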
9983
9984 /* Handling ARM extension space insns. */
9985
9986 static int
9987 arm_record_extension_space (insn_decode_record *arm_insn_r)
9988 {
9989 int ret = 0; /* Return value: -1: record failure; 0: success. */
9990 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9991 uint32_t record_buf[8], record_buf_mem[8];
9992 uint32_t reg_src1 = 0;
9993 struct regcache *reg_cache = arm_insn_r->regcache;
9994 ULONGEST u_regval = 0;
9995
9996 gdb_assert (!INSN_RECORDED(arm_insn_r));
9997 /* Handle unconditional insn extension space. */
9998
9999 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10000 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10001 if (arm_insn_r->cond)
10002 {
10003 /* PLD has no effect on architectural state; it just affects
10004 the caches. */
10005 if (5 == ((opcode1 & 0xE0) >> 5))
10006 {
10007 /* BLX(1) */
10008 record_buf[0] = ARM_PS_REGNUM;
10009 record_buf[1] = ARM_LR_REGNUM;
10010 arm_insn_r->reg_rec_count = 2;
10011 }
10012 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10013 }
10014
10015
10016 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10017 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10018 {
10019 ret = -1;
10020 /* Undefined instruction on ARM V5; need to handle if later
10021 versions define it. */
10022 }
10023
10024 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10025 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10026 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10027
10028 /* Handle arithmetic insn extension space. */
10029 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10030 && !INSN_RECORDED(arm_insn_r))
10031 {
10032 /* Handle MLA(S) and MUL(S). */
10033 if (in_inclusive_range (insn_op1, 0U, 3U))
10034 {
10035 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10036 record_buf[1] = ARM_PS_REGNUM;
10037 arm_insn_r->reg_rec_count = 2;
10038 }
10039 else if (in_inclusive_range (insn_op1, 4U, 15U))
10040 {
10041 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10042 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10043 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10044 record_buf[2] = ARM_PS_REGNUM;
10045 arm_insn_r->reg_rec_count = 3;
10046 }
10047 }
10048
10049 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10050 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10051 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10052
10053 /* Handle control insn extension space. */
10054
10055 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10056 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10057 {
10058 if (!bit (arm_insn_r->arm_insn, 25))
10059 {
10060 if (!bits (arm_insn_r->arm_insn, 4, 7))
10061 {
10062 if ((0 == insn_op1) || (2 == insn_op1))
10063 {
10064 /* MRS. */
10065 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10066 arm_insn_r->reg_rec_count = 1;
10067 }
10068 else if (1 == insn_op1)
10069 {
10070 /* CPSR is going to be changed. */
10071 record_buf[0] = ARM_PS_REGNUM;
10072 arm_insn_r->reg_rec_count = 1;
10073 }
10074 else if (3 == insn_op1)
10075 {
10076 /* SPSR is going to be changed. */
10077 /* We need to get SPSR value, which is yet to be done. */
10078 return -1;
10079 }
10080 }
10081 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10082 {
10083 if (1 == insn_op1)
10084 {
10085 /* BX. */
10086 record_buf[0] = ARM_PS_REGNUM;
10087 arm_insn_r->reg_rec_count = 1;
10088 }
10089 else if (3 == insn_op1)
10090 {
10091 /* CLZ. */
10092 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10093 arm_insn_r->reg_rec_count = 1;
10094 }
10095 }
10096 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10097 {
10098 /* BLX. */
10099 record_buf[0] = ARM_PS_REGNUM;
10100 record_buf[1] = ARM_LR_REGNUM;
10101 arm_insn_r->reg_rec_count = 2;
10102 }
10103 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10104 {
10105 /* QADD, QSUB, QDADD, QDSUB */
10106 record_buf[0] = ARM_PS_REGNUM;
10107 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10108 arm_insn_r->reg_rec_count = 2;
10109 }
10110 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10111 {
10112 /* BKPT. */
10113 record_buf[0] = ARM_PS_REGNUM;
10114 record_buf[1] = ARM_LR_REGNUM;
10115 arm_insn_r->reg_rec_count = 2;
10116
10117 /* Save SPSR also; how? */
10118 return -1;
10119 }
10120 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10121 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10122 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10123 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10124 )
10125 {
10126 if (0 == insn_op1 || 1 == insn_op1)
10127 {
10128 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10129 /* We don't optimize for SMULW<y>, where we
10130 need only Rd. */
10131 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10132 record_buf[1] = ARM_PS_REGNUM;
10133 arm_insn_r->reg_rec_count = 2;
10134 }
10135 else if (2 == insn_op1)
10136 {
10137 /* SMLAL<x><y>. */
10138 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10139 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10140 arm_insn_r->reg_rec_count = 2;
10141 }
10142 else if (3 == insn_op1)
10143 {
10144 /* SMUL<x><y>. */
10145 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10146 arm_insn_r->reg_rec_count = 1;
10147 }
10148 }
10149 }
10150 else
10151 {
10152 /* MSR : immediate form. */
10153 if (1 == insn_op1)
10154 {
10155 /* CPSR is going to be changed. */
10156 record_buf[0] = ARM_PS_REGNUM;
10157 arm_insn_r->reg_rec_count = 1;
10158 }
10159 else if (3 == insn_op1)
10160 {
10161 /* SPSR is going to be changed. */
10162 /* We need to get the SPSR value, which is yet to be done. */
10163 return -1;
10164 }
10165 }
10166 }
10167
10168 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10169 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10170 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10171
10172 /* Handle load/store insn extension space. */
10173
10174 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10175 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10176 && !INSN_RECORDED(arm_insn_r))
10177 {
10178 /* SWP/SWPB. */
10179 if (0 == insn_op1)
10180 {
10181 /* These insns change a register as well as memory. */
10182 /* SWP or SWPB insn. */
10183 /* Get memory address given by Rn. */
10184 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10185 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10186 /* SWP insn swaps a word. */
10187 if (8 == arm_insn_r->opcode)
10188 {
10189 record_buf_mem[0] = 4;
10190 }
10191 else
10192 {
10193 /* SWPB insn, swaps only byte. */
10194 record_buf_mem[0] = 1;
10195 }
10196 record_buf_mem[1] = u_regval;
10197 arm_insn_r->mem_rec_count = 1;
10198 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10199 arm_insn_r->reg_rec_count = 1;
10200 }
10201 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10202 {
10203 /* STRH. */
10204 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10205 ARM_RECORD_STRH);
10206 }
10207 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10208 {
10209 /* LDRD. */
10210 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10211 record_buf[1] = record_buf[0] + 1;
10212 arm_insn_r->reg_rec_count = 2;
10213 }
10214 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10215 {
10216 /* STRD. */
10217 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10218 ARM_RECORD_STRD);
10219 }
10220 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10221 {
10222 /* LDRH, LDRSB, LDRSH. */
10223 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10224 arm_insn_r->reg_rec_count = 1;
10225 }
10226
10227 }
10228
10229 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10230 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10231 && !INSN_RECORDED(arm_insn_r))
10232 {
10233 ret = -1;
10234 /* Handle coprocessor insn extension space. */
10235 }
10236
10237 /* To be done for ARMv5 and later; as of now we return -1. */
10238 if (-1 == ret)
10239 return ret;
10240
10241 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10242 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10243
10244 return ret;
10245 }
10246
10247 /* Handling opcode 000 insns. */
10248
10249 static int
10250 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10251 {
10252 struct regcache *reg_cache = arm_insn_r->regcache;
10253 uint32_t record_buf[8], record_buf_mem[8];
10254 ULONGEST u_regval[2] = {0};
10255
10256 uint32_t reg_src1 = 0;
10257 uint32_t opcode1 = 0;
10258
10259 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10260 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10261 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10262
10263 if (!((opcode1 & 0x19) == 0x10))
10264 {
10265 /* Data-processing (register) and data-processing (register-shifted
10266 register). */
10267 /* In all 11 shifter operand modes, the insn modifies the destination
10268 register, which is specified by bits 12-15. */
10269 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10270 record_buf[1] = ARM_PS_REGNUM;
10271 arm_insn_r->reg_rec_count = 2;
10272 }
10273 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10274 {
10275 /* Miscellaneous instructions */
10276
10277 if (3 == arm_insn_r->decode && 0x12 == opcode1
10278 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10279 {
10280 /* Handle BLX, branch and link/exchange. */
10281 if (9 == arm_insn_r->opcode)
10282 {
10283 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm,
10284 and R14 stores the return address. */
10285 record_buf[0] = ARM_PS_REGNUM;
10286 record_buf[1] = ARM_LR_REGNUM;
10287 arm_insn_r->reg_rec_count = 2;
10288 }
10289 }
10290 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10291 {
10292 /* Handle enhanced software breakpoint insn, BKPT. */
10293 /* CPSR is changed so that execution continues in ARM state with normal
10294 interrupts disabled, entering Abort mode. */
10295 /* PC is set according to the high vector configuration. */
10296 /* If the user hits the breakpoint and then types reverse,
10297 we need to go back with the previous CPSR and
10298 Program Counter. */
10299 record_buf[0] = ARM_PS_REGNUM;
10300 record_buf[1] = ARM_LR_REGNUM;
10301 arm_insn_r->reg_rec_count = 2;
10302
10303 /* Save SPSR also; how? */
10304 return -1;
10305 }
10306 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10307 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10308 {
10309 /* Handle BX, branch and link/exchange. */
10310 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10311 record_buf[0] = ARM_PS_REGNUM;
10312 arm_insn_r->reg_rec_count = 1;
10313 }
10314 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10315 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10316 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10317 {
10318 /* Count leading zeros: CLZ. */
10319 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10320 arm_insn_r->reg_rec_count = 1;
10321 }
10322 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10323 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10324 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10325 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10326 {
10327 /* Handle MRS insn. */
10328 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10329 arm_insn_r->reg_rec_count = 1;
10330 }
10331 }
10332 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10333 {
10334 /* Multiply and multiply-accumulate */
10335
10336 /* Handle multiply instructions. */
10337 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10338 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10339 {
10340 /* Handle MLA and MUL. */
10341 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10342 record_buf[1] = ARM_PS_REGNUM;
10343 arm_insn_r->reg_rec_count = 2;
10344 }
10345 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10346 {
10347 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10348 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10349 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10350 record_buf[2] = ARM_PS_REGNUM;
10351 arm_insn_r->reg_rec_count = 3;
10352 }
10353 }
10354 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10355 {
10356 /* Synchronization primitives */
10357
10358 /* Handling SWP, SWPB. */
10359 /* These insns change a register as well as memory. */
10360 /* SWP or SWPB insn. */
10361
10362 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10363 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10364 /* SWP insn swaps a word. */
10365 if (8 == arm_insn_r->opcode)
10366 {
10367 record_buf_mem[0] = 4;
10368 }
10369 else
10370 {
10371 /* SWPB insn, swaps only byte. */
10372 record_buf_mem[0] = 1;
10373 }
10374 record_buf_mem[1] = u_regval[0];
10375 arm_insn_r->mem_rec_count = 1;
10376 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10377 arm_insn_r->reg_rec_count = 1;
10378 }
10379 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10380 || 15 == arm_insn_r->decode)
10381 {
10382 if ((opcode1 & 0x12) == 2)
10383 {
10384 /* Extra load/store (unprivileged) */
10385 return -1;
10386 }
10387 else
10388 {
10389 /* Extra load/store */
10390 switch (bits (arm_insn_r->arm_insn, 5, 6))
10391 {
10392 case 1:
10393 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10394 {
10395 /* STRH (register), STRH (immediate) */
10396 arm_record_strx (arm_insn_r, &record_buf[0],
10397 &record_buf_mem[0], ARM_RECORD_STRH);
10398 }
10399 else if ((opcode1 & 0x05) == 0x1)
10400 {
10401 /* LDRH (register) */
10402 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10403 arm_insn_r->reg_rec_count = 1;
10404
10405 if (bit (arm_insn_r->arm_insn, 21))
10406 {
10407 /* Write back to Rn. */
10408 record_buf[arm_insn_r->reg_rec_count++]
10409 = bits (arm_insn_r->arm_insn, 16, 19);
10410 }
10411 }
10412 else if ((opcode1 & 0x05) == 0x5)
10413 {
10414 /* LDRH (immediate), LDRH (literal) */
10415 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10416
10417 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10418 arm_insn_r->reg_rec_count = 1;
10419
10420 if (rn != 15)
10421 {
10422 /* LDRH (immediate) */
10423 if (bit (arm_insn_r->arm_insn, 21))
10424 {
10425 /* Write back to Rn. */
10426 record_buf[arm_insn_r->reg_rec_count++] = rn;
10427 }
10428 }
10429 }
10430 else
10431 return -1;
10432 break;
10433 case 2:
10434 if ((opcode1 & 0x05) == 0x0)
10435 {
10436 /* LDRD (register) */
10437 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10438 record_buf[1] = record_buf[0] + 1;
10439 arm_insn_r->reg_rec_count = 2;
10440
10441 if (bit (arm_insn_r->arm_insn, 21))
10442 {
10443 /* Write back to Rn. */
10444 record_buf[arm_insn_r->reg_rec_count++]
10445 = bits (arm_insn_r->arm_insn, 16, 19);
10446 }
10447 }
10448 else if ((opcode1 & 0x05) == 0x1)
10449 {
10450 /* LDRSB (register) */
10451 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10452 arm_insn_r->reg_rec_count = 1;
10453
10454 if (bit (arm_insn_r->arm_insn, 21))
10455 {
10456 /* Write back to Rn. */
10457 record_buf[arm_insn_r->reg_rec_count++]
10458 = bits (arm_insn_r->arm_insn, 16, 19);
10459 }
10460 }
10461 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10462 {
10463 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10464 LDRSB (literal) */
10465 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10466
10467 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10468 arm_insn_r->reg_rec_count = 1;
10469
10470 if (rn != 15)
10471 {
10472 /* LDRD (immediate), LDRSB (immediate) */
10473 if (bit (arm_insn_r->arm_insn, 21))
10474 {
10475 /* Write back to Rn. */
10476 record_buf[arm_insn_r->reg_rec_count++] = rn;
10477 }
10478 }
10479 }
10480 else
10481 return -1;
10482 break;
10483 case 3:
10484 if ((opcode1 & 0x05) == 0x0)
10485 {
10486 /* STRD (register) */
10487 arm_record_strx (arm_insn_r, &record_buf[0],
10488 &record_buf_mem[0], ARM_RECORD_STRD);
10489 }
10490 else if ((opcode1 & 0x05) == 0x1)
10491 {
10492 /* LDRSH (register) */
10493 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10494 arm_insn_r->reg_rec_count = 1;
10495
10496 if (bit (arm_insn_r->arm_insn, 21))
10497 {
10498 /* Write back to Rn. */
10499 record_buf[arm_insn_r->reg_rec_count++]
10500 = bits (arm_insn_r->arm_insn, 16, 19);
10501 }
10502 }
10503 else if ((opcode1 & 0x05) == 0x4)
10504 {
10505 /* STRD (immediate) */
10506 arm_record_strx (arm_insn_r, &record_buf[0],
10507 &record_buf_mem[0], ARM_RECORD_STRD);
10508 }
10509 else if ((opcode1 & 0x05) == 0x5)
10510 {
10511 /* LDRSH (immediate), LDRSH (literal) */
10512 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10513 arm_insn_r->reg_rec_count = 1;
10514
10515 if (bit (arm_insn_r->arm_insn, 21))
10516 {
10517 /* Write back to Rn. */
10518 record_buf[arm_insn_r->reg_rec_count++]
10519 = bits (arm_insn_r->arm_insn, 16, 19);
10520 }
10521 }
10522 else
10523 return -1;
10524 break;
10525 default:
10526 return -1;
10527 }
10528 }
10529 }
10530 else
10531 {
10532 return -1;
10533 }
10534
10535 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10536 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10537 return 0;
10538 }
10539
10540 /* Handling opcode 001 insns. */
10541
10542 static int
10543 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10544 {
10545 uint32_t record_buf[8], record_buf_mem[8];
10546
10547 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10548 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10549
10550 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10551 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10552 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10553 )
10554 {
10555 /* Handle MSR insn. */
10556 if (9 == arm_insn_r->opcode)
10557 {
10558 /* CPSR is going to be changed. */
10559 record_buf[0] = ARM_PS_REGNUM;
10560 arm_insn_r->reg_rec_count = 1;
10561 }
10562 else
10563 {
10564 /* SPSR is going to be changed. */
10565 }
10566 }
10567 else if (arm_insn_r->opcode <= 15)
10568 {
10569 /* Normal data processing insns. */
10570 /* In all 11 shifter operand modes, the insn modifies the destination
10571 register, which is specified by bits 12-15. */
10572 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10573 record_buf[1] = ARM_PS_REGNUM;
10574 arm_insn_r->reg_rec_count = 2;
10575 }
10576 else
10577 {
10578 return -1;
10579 }
10580
10581 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10582 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10583 return 0;
10584 }
10585
10586 static int
10587 arm_record_media (insn_decode_record *arm_insn_r)
10588 {
10589 uint32_t record_buf[8];
10590
10591 switch (bits (arm_insn_r->arm_insn, 22, 24))
10592 {
10593 case 0:
10594 /* Parallel addition and subtraction, signed */
10595 case 1:
10596 /* Parallel addition and subtraction, unsigned */
10597 case 2:
10598 case 3:
10599 /* Packing, unpacking, saturation and reversal */
10600 {
10601 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10602
10603 record_buf[arm_insn_r->reg_rec_count++] = rd;
10604 }
10605 break;
10606
10607 case 4:
10608 case 5:
10609 /* Signed multiplies */
10610 {
10611 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10612 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10613
10614 record_buf[arm_insn_r->reg_rec_count++] = rd;
10615 if (op1 == 0x0)
10616 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10617 else if (op1 == 0x4)
10618 record_buf[arm_insn_r->reg_rec_count++]
10619 = bits (arm_insn_r->arm_insn, 12, 15);
10620 }
10621 break;
10622
10623 case 6:
10624 {
10625 if (bit (arm_insn_r->arm_insn, 21)
10626 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10627 {
10628 /* SBFX */
10629 record_buf[arm_insn_r->reg_rec_count++]
10630 = bits (arm_insn_r->arm_insn, 12, 15);
10631 }
10632 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10633 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10634 {
10635 /* USAD8 and USADA8 */
10636 record_buf[arm_insn_r->reg_rec_count++]
10637 = bits (arm_insn_r->arm_insn, 16, 19);
10638 }
10639 }
10640 break;
10641
10642 case 7:
10643 {
10644 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10645 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10646 {
10647 /* Permanently UNDEFINED */
10648 return -1;
10649 }
10650 else
10651 {
10652 /* BFC, BFI and UBFX */
10653 record_buf[arm_insn_r->reg_rec_count++]
10654 = bits (arm_insn_r->arm_insn, 12, 15);
10655 }
10656 }
10657 break;
10658
10659 default:
10660 return -1;
10661 }
10662
10663 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10664
10665 return 0;
10666 }
10667
10668 /* Handle ARM mode instructions with opcode 010. */
10669
10670 static int
10671 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10672 {
10673 struct regcache *reg_cache = arm_insn_r->regcache;
10674
10675 uint32_t reg_base, reg_dest;
10676 uint32_t offset_12, tgt_mem_addr;
10677 uint32_t record_buf[8], record_buf_mem[8];
10678 unsigned char wback;
10679 ULONGEST u_regval;
10680
10681 /* Calculate wback. */
10682 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10683 || (bit (arm_insn_r->arm_insn, 21) == 1);
10684
10685 arm_insn_r->reg_rec_count = 0;
10686 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10687
10688 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10689 {
10690 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10691 and LDRT. */
10692
10693 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10694 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10695
10696 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10697 precedes an LDR instruction having R15 as the destination register, it
10698 emulates a branch and link instruction, and hence we need to save
10699 CPSR and PC as well. */
10700 if (ARM_PC_REGNUM == reg_dest)
10701 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10702
10703 /* If wback is true, also save the base register, which is going to be
10704 written to. */
10705 if (wback)
10706 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10707 }
10708 else
10709 {
10710 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10711
10712 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10713 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10714
10715 /* Handle bit U. */
10716 if (bit (arm_insn_r->arm_insn, 23))
10717 {
10718 /* U == 1: Add the offset. */
10719 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10720 }
10721 else
10722 {
10723 /* U == 0: subtract the offset. */
10724 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10725 }
10726
10727 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10728 bytes. */
10729 if (bit (arm_insn_r->arm_insn, 22))
10730 {
10731 /* STRB and STRBT: 1 byte. */
10732 record_buf_mem[0] = 1;
10733 }
10734 else
10735 {
10736 /* STR and STRT: 4 bytes. */
10737 record_buf_mem[0] = 4;
10738 }
10739
10740 /* Handle bit P. */
10741 if (bit (arm_insn_r->arm_insn, 24))
10742 record_buf_mem[1] = tgt_mem_addr;
10743 else
10744 record_buf_mem[1] = (uint32_t) u_regval;
10745
10746 arm_insn_r->mem_rec_count = 1;
10747
10748 /* If wback is true, also save the base register, which is going to be
10749 written to. */
10750 if (wback)
10751 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10752 }
10753
10754 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10755 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10756 return 0;
10757 }
10758
10759 /* Handling opcode 011 insns. */
10760
10761 static int
10762 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10763 {
10764 struct regcache *reg_cache = arm_insn_r->regcache;
10765
10766 uint32_t shift_imm = 0;
10767 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10768 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10769 uint32_t record_buf[8], record_buf_mem[8];
10770
10771 LONGEST s_word;
10772 ULONGEST u_regval[2];
10773
10774 if (bit (arm_insn_r->arm_insn, 4))
10775 return arm_record_media (arm_insn_r);
10776
10777 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10778 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10779
10780   /* Handle enhanced store insns and the LDRD DSP insn; the cases below
10781      are ordered according to the addressing modes of the store insns
10782      (STRH etc.).  */
10783
10784 /* LDR or STR? */
10785 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10786 {
10787 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10788       /* The LDR insn is capable of branching: if
10789 	 MOV LR, PC precedes an LDR insn that loads into R15 (PC),
10790 	 the pair emulates a branch and link insn, and hence we
10791 	 need to save CPSR and PC as well. */
10792 if (15 != reg_dest)
10793 {
10794 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10795 arm_insn_r->reg_rec_count = 1;
10796 }
10797 else
10798 {
10799 record_buf[0] = reg_dest;
10800 record_buf[1] = ARM_PS_REGNUM;
10801 arm_insn_r->reg_rec_count = 2;
10802 }
10803 }
10804 else
10805 {
10806 if (! bits (arm_insn_r->arm_insn, 4, 11))
10807 {
10808 /* Store insn, register offset and register pre-indexed,
10809 register post-indexed. */
10810 /* Get Rm. */
10811 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10812 /* Get Rn. */
10813 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10814 	  regcache_raw_read_unsigned (reg_cache, reg_src1,
10815 				      &u_regval[0]);
10816 	  regcache_raw_read_unsigned (reg_cache, reg_src2,
10817 				      &u_regval[1]);
10818 if (15 == reg_src2)
10819 {
10820 	      /* If R15 is used as Rn, the value is the current PC + 8.  */
10821 	      /* Pre-indexed mode doesn't reach here; it is an illegal insn.  */
10822 u_regval[0] = u_regval[0] + 8;
10823 }
10824 /* Calculate target store address, Rn +/- Rm, register offset. */
10825 /* U == 1. */
10826 if (bit (arm_insn_r->arm_insn, 23))
10827 {
10828 tgt_mem_addr = u_regval[0] + u_regval[1];
10829 }
10830 else
10831 {
10832 tgt_mem_addr = u_regval[1] - u_regval[0];
10833 }
10834
10835 switch (arm_insn_r->opcode)
10836 {
10837 /* STR. */
10838 case 8:
10839 case 12:
10840 /* STR. */
10841 case 9:
10842 case 13:
10843 /* STRT. */
10844 case 1:
10845 case 5:
10846 /* STR. */
10847 case 0:
10848 case 4:
10849 record_buf_mem[0] = 4;
10850 break;
10851
10852 /* STRB. */
10853 case 10:
10854 case 14:
10855 /* STRB. */
10856 case 11:
10857 case 15:
10858 /* STRBT. */
10859 case 3:
10860 case 7:
10861 /* STRB. */
10862 case 2:
10863 case 6:
10864 record_buf_mem[0] = 1;
10865 break;
10866
10867 default:
10868 gdb_assert_not_reached ("no decoding pattern found");
10869 break;
10870 }
10871 record_buf_mem[1] = tgt_mem_addr;
10872 arm_insn_r->mem_rec_count = 1;
10873
10874 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10875 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10876 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10877 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10878 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10879 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10880 )
10881 {
10882 /* Rn is going to be changed in pre-indexed mode and
10883 post-indexed mode as well. */
10884 record_buf[0] = reg_src2;
10885 arm_insn_r->reg_rec_count = 1;
10886 }
10887 }
10888 else
10889 {
10890 /* Store insn, scaled register offset; scaled pre-indexed. */
10891 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10892 /* Get Rm. */
10893 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10894 /* Get Rn. */
10895 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10896 /* Get shift_imm. */
10897 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10898 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10899 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10900 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10901 	  /* Bits 6:5 (held in offset_12) select the shift type: 0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR (RRX when shift_imm is zero).  */
10902 switch (offset_12)
10903 {
10904 case 0:
10905 	      /* LSL: shift left by shift_imm.  */
10906 offset_12 = u_regval[0] << shift_imm;
10907 break;
10908
10909 case 1:
10910 	      offset_12 = (!shift_imm) ? 0 : (u_regval[0] >> shift_imm);
10911 break;
10912
10913 case 2:
10914 if (!shift_imm)
10915 {
10916 if (bit (u_regval[0], 31))
10917 {
10918 offset_12 = 0xFFFFFFFF;
10919 }
10920 else
10921 {
10922 offset_12 = 0;
10923 }
10924 }
10925 else
10926 {
10927 /* This is arithmetic shift. */
10928 offset_12 = s_word >> shift_imm;
10929 }
10930 break;
10931
10932 case 3:
10933 if (!shift_imm)
10934 {
10935 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10936 &u_regval[1]);
10937 		  /* RRX: the C flag (CPSR bit 29) becomes the new bit 31.  */
10938 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10939 | (u_regval[0]) >> 1);
10940 }
10941 else
10942 {
10943 		  offset_12 = ((u_regval[0] >> shift_imm)
10944 			       | (u_regval[0] << (32 - shift_imm)));
10946 }
10947 break;
10948
10949 default:
10950 gdb_assert_not_reached ("no decoding pattern found");
10951 break;
10952 }
10953
10954 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10955 /* bit U set. */
10956 if (bit (arm_insn_r->arm_insn, 23))
10957 {
10958 tgt_mem_addr = u_regval[1] + offset_12;
10959 }
10960 else
10961 {
10962 tgt_mem_addr = u_regval[1] - offset_12;
10963 }
10964
10965 switch (arm_insn_r->opcode)
10966 {
10967 /* STR. */
10968 case 8:
10969 case 12:
10970 /* STR. */
10971 case 9:
10972 case 13:
10973 /* STRT. */
10974 case 1:
10975 case 5:
10976 /* STR. */
10977 case 0:
10978 case 4:
10979 record_buf_mem[0] = 4;
10980 break;
10981
10982 /* STRB. */
10983 case 10:
10984 case 14:
10985 /* STRB. */
10986 case 11:
10987 case 15:
10988 /* STRBT. */
10989 case 3:
10990 case 7:
10991 /* STRB. */
10992 case 2:
10993 case 6:
10994 record_buf_mem[0] = 1;
10995 break;
10996
10997 default:
10998 gdb_assert_not_reached ("no decoding pattern found");
10999 break;
11000 }
11001 record_buf_mem[1] = tgt_mem_addr;
11002 arm_insn_r->mem_rec_count = 1;
11003
11004 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11005 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11006 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11007 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11008 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11009 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11010 )
11011 {
11012 	      /* Rn is going to be changed in register scaled pre-indexed
11013 		 mode, and scaled post-indexed mode. */
11014 record_buf[0] = reg_src2;
11015 arm_insn_r->reg_rec_count = 1;
11016 }
11017 }
11018 }
11019
11020 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11021 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11022 return 0;
11023 }
11024
11025 /* Handle ARM mode instructions with opcode 100. */
11026
11027 static int
11028 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11029 {
11030 struct regcache *reg_cache = arm_insn_r->regcache;
11031 uint32_t register_count = 0, register_bits;
11032 uint32_t reg_base, addr_mode;
11033 uint32_t record_buf[24], record_buf_mem[48];
11034 uint32_t wback;
11035 ULONGEST u_regval;
11036
11037 /* Fetch the list of registers. */
11038 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11039 arm_insn_r->reg_rec_count = 0;
11040
11041 /* Fetch the base register that contains the address we are loading data
11042 to. */
11043 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11044
11045 /* Calculate wback. */
11046 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11047
11048 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11049 {
11050 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11051
11052 /* Find out which registers are going to be loaded from memory. */
11053 while (register_bits)
11054 {
11055 if (register_bits & 0x00000001)
11056 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11057 register_bits = register_bits >> 1;
11058 register_count++;
11059 }
11060
11061
11062 /* If wback is true, also save the base register, which is going to be
11063 written to. */
11064 if (wback)
11065 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11066
11067 /* Save the CPSR register. */
11068 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11069 }
11070 else
11071 {
11072 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11073
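      /* Bits 24:23 are the P and U bits; together they select the addressing
	 mode: 0 = decrement after, 1 = increment after, 2 = decrement before,
	 3 = increment before.  */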
11074 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11075
11076 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11077
11078 /* Find out how many registers are going to be stored to memory. */
11079 while (register_bits)
11080 {
11081 if (register_bits & 0x00000001)
11082 register_count++;
11083 register_bits = register_bits >> 1;
11084 }
11085
11086 switch (addr_mode)
11087 {
11088 /* STMDA (STMED): Decrement after. */
11089 case 0:
11090 record_buf_mem[1] = (uint32_t) u_regval
11091 - register_count * ARM_INT_REGISTER_SIZE + 4;
11092 break;
11093 /* STM (STMIA, STMEA): Increment after. */
11094 case 1:
11095 record_buf_mem[1] = (uint32_t) u_regval;
11096 break;
11097 /* STMDB (STMFD): Decrement before. */
11098 case 2:
11099 record_buf_mem[1] = (uint32_t) u_regval
11100 - register_count * ARM_INT_REGISTER_SIZE;
11101 break;
11102 /* STMIB (STMFA): Increment before. */
11103 case 3:
11104 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11105 break;
11106 default:
11107 gdb_assert_not_reached ("no decoding pattern found");
11108 break;
11109 }
11110
11111 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11112 arm_insn_r->mem_rec_count = 1;
11113
11114 /* If wback is true, also save the base register, which is going to be
11115 written to. */
11116 if (wback)
11117 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11118 }
11119
11120 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11121 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11122 return 0;
11123 }
11124
11125 /* Handling opcode 101 insns. */
11126
11127 static int
11128 arm_record_b_bl (insn_decode_record *arm_insn_r)
11129 {
11130 uint32_t record_buf[8];
11131
11132 /* Handle B, BL, BLX(1) insns. */
11133 /* B simply branches so we do nothing here. */
11134   /* Note: BLX(1) doesn't fall here; instead it falls into the
11135      extension space. */
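  /* Bit 24 distinguishes B from BL: when it is set the insn is BL, which
     writes the return address into LR.  */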
11136 if (bit (arm_insn_r->arm_insn, 24))
11137 {
11138 record_buf[0] = ARM_LR_REGNUM;
11139 arm_insn_r->reg_rec_count = 1;
11140 }
11141
11142 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11143
11144 return 0;
11145 }
11146
11147 static int
11148 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11149 {
11150 printf_unfiltered (_("Process record does not support instruction "
11151 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11152 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11153
11154 return -1;
11155 }
11156
11157 /* Record handler for vector data transfer instructions. */
11158
11159 static int
11160 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11161 {
11162 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11163 uint32_t record_buf[4];
11164
11165 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11166 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11167 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11168 bit_l = bit (arm_insn_r->arm_insn, 20);
11169 bit_c = bit (arm_insn_r->arm_insn, 8);
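  /* L (bit 20) is set for transfers to an ARM core register; C (bit 8) is
     set for the 8/16-bit element transfers (VMOV scalar, VDUP) and clear
     for the 32-bit VMOV/VMRS/VMSR forms.  */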
11170
11171 /* Handle VMOV instruction. */
11172 if (bit_l && bit_c)
11173 {
11174 record_buf[0] = reg_t;
11175 arm_insn_r->reg_rec_count = 1;
11176 }
11177 else if (bit_l && !bit_c)
11178 {
11179 /* Handle VMOV instruction. */
11180 if (bits_a == 0x00)
11181 {
11182 record_buf[0] = reg_t;
11183 arm_insn_r->reg_rec_count = 1;
11184 }
11185 /* Handle VMRS instruction. */
11186 else if (bits_a == 0x07)
11187 {
11188 if (reg_t == 15)
11189 reg_t = ARM_PS_REGNUM;
11190
11191 record_buf[0] = reg_t;
11192 arm_insn_r->reg_rec_count = 1;
11193 }
11194 }
11195 else if (!bit_l && !bit_c)
11196 {
11197 /* Handle VMOV instruction. */
11198 if (bits_a == 0x00)
11199 {
11200 record_buf[0] = ARM_D0_REGNUM + reg_v;
11201
11202 arm_insn_r->reg_rec_count = 1;
11203 }
11204 /* Handle VMSR instruction. */
11205 else if (bits_a == 0x07)
11206 {
11207 record_buf[0] = ARM_FPSCR_REGNUM;
11208 arm_insn_r->reg_rec_count = 1;
11209 }
11210 }
11211 else if (!bit_l && bit_c)
11212 {
11213 /* Handle VMOV instruction. */
11214 if (!(bits_a & 0x04))
11215 {
11216 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11217 + ARM_D0_REGNUM;
11218 arm_insn_r->reg_rec_count = 1;
11219 }
11220 /* Handle VDUP instruction. */
11221 else
11222 {
11223 if (bit (arm_insn_r->arm_insn, 21))
11224 {
11225 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11226 record_buf[0] = reg_v + ARM_D0_REGNUM;
11227 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11228 arm_insn_r->reg_rec_count = 2;
11229 }
11230 else
11231 {
11232 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11233 record_buf[0] = reg_v + ARM_D0_REGNUM;
11234 arm_insn_r->reg_rec_count = 1;
11235 }
11236 }
11237 }
11238
11239 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11240 return 0;
11241 }
11242
11243 /* Record handler for extension register load/store instructions. */
11244
11245 static int
11246 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11247 {
11248 uint32_t opcode, single_reg;
11249 uint8_t op_vldm_vstm;
11250 uint32_t record_buf[8], record_buf_mem[128];
11251 ULONGEST u_regval = 0;
11252
11253 struct regcache *reg_cache = arm_insn_r->regcache;
11254
11255 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11256 single_reg = !bit (arm_insn_r->arm_insn, 8);
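  /* OPCODE holds insn bits 24:20 (P U D W L); masking with 0x1b clears the
     D bit so the VLDM/VSTM/VPUSH/VPOP checks below do not depend on it.  */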
11257 op_vldm_vstm = opcode & 0x1b;
11258
11259 /* Handle VMOV instructions. */
11260 if ((opcode & 0x1e) == 0x04)
11261 {
11262 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11263 {
11264 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11265 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11266 arm_insn_r->reg_rec_count = 2;
11267 }
11268 else
11269 {
11270 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11271 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11272
11273 if (single_reg)
11274 {
11275 /* The first S register number m is REG_M:M (M is bit 5),
11276 the corresponding D register number is REG_M:M / 2, which
11277 is REG_M. */
11278 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11279 /* The second S register number is REG_M:M + 1, the
11280 corresponding D register number is (REG_M:M + 1) / 2.
11281 IOW, if bit M is 1, the first and second S registers
11282 are mapped to different D registers, otherwise, they are
11283 in the same D register. */
11284 if (bit_m)
11285 {
11286 record_buf[arm_insn_r->reg_rec_count++]
11287 = ARM_D0_REGNUM + reg_m + 1;
11288 }
11289 }
11290 else
11291 {
11292 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11293 arm_insn_r->reg_rec_count = 1;
11294 }
11295 }
11296 }
11297 /* Handle VSTM and VPUSH instructions. */
11298 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11299 || op_vldm_vstm == 0x12)
11300 {
11301 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11302 uint32_t memory_index = 0;
11303
11304 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11305 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11306 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11307 imm_off32 = imm_off8 << 2;
11308 memory_count = imm_off8;
11309
11310 if (bit (arm_insn_r->arm_insn, 23))
11311 start_address = u_regval;
11312 else
11313 start_address = u_regval - imm_off32;
11314
11315 if (bit (arm_insn_r->arm_insn, 21))
11316 {
11317 record_buf[0] = reg_rn;
11318 arm_insn_r->reg_rec_count = 1;
11319 }
11320
11321 while (memory_count > 0)
11322 {
11323 if (single_reg)
11324 {
11325 record_buf_mem[memory_index] = 4;
11326 record_buf_mem[memory_index + 1] = start_address;
11327 start_address = start_address + 4;
11328 memory_index = memory_index + 2;
11329 }
11330 else
11331 {
11332 record_buf_mem[memory_index] = 4;
11333 record_buf_mem[memory_index + 1] = start_address;
11334 record_buf_mem[memory_index + 2] = 4;
11335 record_buf_mem[memory_index + 3] = start_address + 4;
11336 start_address = start_address + 8;
11337 memory_index = memory_index + 4;
11338 }
11339 memory_count--;
11340 }
11341 arm_insn_r->mem_rec_count = (memory_index >> 1);
11342 }
11343 /* Handle VLDM instructions. */
11344 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11345 || op_vldm_vstm == 0x13)
11346 {
11347 uint32_t reg_count, reg_vd;
11348 uint32_t reg_index = 0;
11349 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11350
11351 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11352 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11353
11354       /* REG_VD is the first D register number.  If the instruction
11355 	 loads memory into S registers (SINGLE_REG is TRUE), the S register
11356 	 number is (REG_VD << 1 | bit D), so the corresponding D
11357 	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11358 if (!single_reg)
11359 reg_vd = reg_vd | (bit_d << 4);
11360
11361 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11362 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11363
11364       /* If the instruction loads memory into D registers, REG_COUNT should
11365 	 be divided by 2, according to the ARM Architecture Reference
11366 	 Manual.  If the instruction loads memory into S registers, divide by
11367 	 2 as well, because two S registers are mapped to one D register. */
11368 reg_count = reg_count / 2;
11369 if (single_reg && bit_d)
11370 {
11371 /* Increase the register count if S register list starts from
11372 an odd number (bit d is one). */
11373 reg_count++;
11374 }
11375
11376 while (reg_count > 0)
11377 {
11378 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11379 reg_count--;
11380 }
11381 arm_insn_r->reg_rec_count = reg_index;
11382 }
11383 /* VSTR Vector store register. */
11384 else if ((opcode & 0x13) == 0x10)
11385 {
11386 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11387 uint32_t memory_index = 0;
11388
11389 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11390 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11391 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11392 imm_off32 = imm_off8 << 2;
11393
11394 if (bit (arm_insn_r->arm_insn, 23))
11395 start_address = u_regval + imm_off32;
11396 else
11397 start_address = u_regval - imm_off32;
11398
11399 if (single_reg)
11400 {
11401 record_buf_mem[memory_index] = 4;
11402 record_buf_mem[memory_index + 1] = start_address;
11403 arm_insn_r->mem_rec_count = 1;
11404 }
11405 else
11406 {
11407 record_buf_mem[memory_index] = 4;
11408 record_buf_mem[memory_index + 1] = start_address;
11409 record_buf_mem[memory_index + 2] = 4;
11410 record_buf_mem[memory_index + 3] = start_address + 4;
11411 arm_insn_r->mem_rec_count = 2;
11412 }
11413 }
11414 /* VLDR Vector load register. */
11415 else if ((opcode & 0x13) == 0x11)
11416 {
11417 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11418
11419 if (!single_reg)
11420 {
11421 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11422 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11423 }
11424 else
11425 {
11426 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11427 /* Record register D rather than pseudo register S. */
11428 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11429 }
11430 arm_insn_r->reg_rec_count = 1;
11431 }
11432
11433 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11434 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11435 return 0;
11436 }
11437
11438 /* Record handler for arm/thumb mode VFP data processing instructions. */
11439
11440 static int
11441 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11442 {
11443 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11444 uint32_t record_buf[4];
11445 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11446 enum insn_types curr_insn_type = INSN_INV;
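  /* The INSN_T* values classify what must be recorded: INSN_T0 a pair of
     D registers, INSN_T1 a single double-precision D register, INSN_T2 a
     single-precision destination, and INSN_T3 only FPSCR (the compares).  */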
11447
11448 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11449 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11450 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11451 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11452 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11453 bit_d = bit (arm_insn_r->arm_insn, 22);
11454 /* Mask off the "D" bit. */
11455 opc1 = opc1 & ~0x04;
11456
11457 /* Handle VMLA, VMLS. */
11458 if (opc1 == 0x00)
11459 {
11460 if (bit (arm_insn_r->arm_insn, 10))
11461 {
11462 if (bit (arm_insn_r->arm_insn, 6))
11463 curr_insn_type = INSN_T0;
11464 else
11465 curr_insn_type = INSN_T1;
11466 }
11467 else
11468 {
11469 if (dp_op_sz)
11470 curr_insn_type = INSN_T1;
11471 else
11472 curr_insn_type = INSN_T2;
11473 }
11474 }
11475 /* Handle VNMLA, VNMLS, VNMUL. */
11476 else if (opc1 == 0x01)
11477 {
11478 if (dp_op_sz)
11479 curr_insn_type = INSN_T1;
11480 else
11481 curr_insn_type = INSN_T2;
11482 }
11483 /* Handle VMUL. */
11484 else if (opc1 == 0x02 && !(opc3 & 0x01))
11485 {
11486 if (bit (arm_insn_r->arm_insn, 10))
11487 {
11488 if (bit (arm_insn_r->arm_insn, 6))
11489 curr_insn_type = INSN_T0;
11490 else
11491 curr_insn_type = INSN_T1;
11492 }
11493 else
11494 {
11495 if (dp_op_sz)
11496 curr_insn_type = INSN_T1;
11497 else
11498 curr_insn_type = INSN_T2;
11499 }
11500 }
11501 /* Handle VADD, VSUB. */
11502 else if (opc1 == 0x03)
11503 {
11504 if (!bit (arm_insn_r->arm_insn, 9))
11505 {
11506 if (bit (arm_insn_r->arm_insn, 6))
11507 curr_insn_type = INSN_T0;
11508 else
11509 curr_insn_type = INSN_T1;
11510 }
11511 else
11512 {
11513 if (dp_op_sz)
11514 curr_insn_type = INSN_T1;
11515 else
11516 curr_insn_type = INSN_T2;
11517 }
11518 }
11519 /* Handle VDIV. */
11520 else if (opc1 == 0x08)
11521 {
11522 if (dp_op_sz)
11523 curr_insn_type = INSN_T1;
11524 else
11525 curr_insn_type = INSN_T2;
11526 }
11527 /* Handle all other vfp data processing instructions. */
11528 else if (opc1 == 0x0b)
11529 {
11530 /* Handle VMOV. */
11531 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11532 {
11533 if (bit (arm_insn_r->arm_insn, 4))
11534 {
11535 if (bit (arm_insn_r->arm_insn, 6))
11536 curr_insn_type = INSN_T0;
11537 else
11538 curr_insn_type = INSN_T1;
11539 }
11540 else
11541 {
11542 if (dp_op_sz)
11543 curr_insn_type = INSN_T1;
11544 else
11545 curr_insn_type = INSN_T2;
11546 }
11547 }
11548 /* Handle VNEG and VABS. */
11549 else if ((opc2 == 0x01 && opc3 == 0x01)
11550 || (opc2 == 0x00 && opc3 == 0x03))
11551 {
11552 if (!bit (arm_insn_r->arm_insn, 11))
11553 {
11554 if (bit (arm_insn_r->arm_insn, 6))
11555 curr_insn_type = INSN_T0;
11556 else
11557 curr_insn_type = INSN_T1;
11558 }
11559 else
11560 {
11561 if (dp_op_sz)
11562 curr_insn_type = INSN_T1;
11563 else
11564 curr_insn_type = INSN_T2;
11565 }
11566 }
11567 /* Handle VSQRT. */
11568 else if (opc2 == 0x01 && opc3 == 0x03)
11569 {
11570 if (dp_op_sz)
11571 curr_insn_type = INSN_T1;
11572 else
11573 curr_insn_type = INSN_T2;
11574 }
11575 /* Handle VCVT. */
11576 else if (opc2 == 0x07 && opc3 == 0x03)
11577 {
11578 if (!dp_op_sz)
11579 curr_insn_type = INSN_T1;
11580 else
11581 curr_insn_type = INSN_T2;
11582 }
11583 else if (opc3 & 0x01)
11584 {
11585 /* Handle VCVT. */
11586 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11587 {
11588 if (!bit (arm_insn_r->arm_insn, 18))
11589 curr_insn_type = INSN_T2;
11590 else
11591 {
11592 if (dp_op_sz)
11593 curr_insn_type = INSN_T1;
11594 else
11595 curr_insn_type = INSN_T2;
11596 }
11597 }
11598 /* Handle VCVT. */
11599 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11600 {
11601 if (dp_op_sz)
11602 curr_insn_type = INSN_T1;
11603 else
11604 curr_insn_type = INSN_T2;
11605 }
11606 /* Handle VCVTB, VCVTT. */
11607 else if ((opc2 & 0x0e) == 0x02)
11608 curr_insn_type = INSN_T2;
11609 /* Handle VCMP, VCMPE. */
11610 else if ((opc2 & 0x0e) == 0x04)
11611 curr_insn_type = INSN_T3;
11612 }
11613 }
11614
11615 switch (curr_insn_type)
11616 {
11617 case INSN_T0:
11618 reg_vd = reg_vd | (bit_d << 4);
11619 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11620 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11621 arm_insn_r->reg_rec_count = 2;
11622 break;
11623
11624 case INSN_T1:
11625 reg_vd = reg_vd | (bit_d << 4);
11626 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11627 arm_insn_r->reg_rec_count = 1;
11628 break;
11629
11630 case INSN_T2:
11631 reg_vd = (reg_vd << 1) | bit_d;
11632 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11633 arm_insn_r->reg_rec_count = 1;
11634 break;
11635
11636 case INSN_T3:
11637 record_buf[0] = ARM_FPSCR_REGNUM;
11638 arm_insn_r->reg_rec_count = 1;
11639 break;
11640
11641 default:
11642 gdb_assert_not_reached ("no decoding pattern found");
11643 break;
11644 }
11645
11646 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11647 return 0;
11648 }
11649
11650 /* Handling opcode 110 insns. */
11651
11652 static int
11653 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11654 {
11655 uint32_t op1, op1_ebit, coproc;
11656
11657 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11658 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11659 op1_ebit = bit (arm_insn_r->arm_insn, 20);
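  /* OP1 is insn bits 25:20.  For coprocessors 10 and 11 (VFP/Neon,
     coproc & 0x0e == 0x0a), bit 25 clear selects the extension register
     load/store forms, and op1 == 00010x selects the 64-bit transfers
     between ARM core and extension registers.  */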
11660
11661 if ((coproc & 0x0e) == 0x0a)
11662 {
11663 /* Handle extension register ld/st instructions. */
11664 if (!(op1 & 0x20))
11665 return arm_record_exreg_ld_st_insn (arm_insn_r);
11666
11667 /* 64-bit transfers between arm core and extension registers. */
11668 if ((op1 & 0x3e) == 0x04)
11669 return arm_record_exreg_ld_st_insn (arm_insn_r);
11670 }
11671 else
11672 {
11673 /* Handle coprocessor ld/st instructions. */
11674 if (!(op1 & 0x3a))
11675 {
11676 /* Store. */
11677 if (!op1_ebit)
11678 return arm_record_unsupported_insn (arm_insn_r);
11679 else
11680 /* Load. */
11681 return arm_record_unsupported_insn (arm_insn_r);
11682 }
11683
11684 /* Move to coprocessor from two arm core registers. */
11685 if (op1 == 0x4)
11686 return arm_record_unsupported_insn (arm_insn_r);
11687
11688 /* Move to two arm core registers from coprocessor. */
11689 if (op1 == 0x5)
11690 {
11691 uint32_t reg_t[2];
11692
11693 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11694 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11695 arm_insn_r->reg_rec_count = 2;
11696
11697 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11698 return 0;
11699 }
11700 }
11701 return arm_record_unsupported_insn (arm_insn_r);
11702 }
11703
11704 /* Handling opcode 111 insns. */
11705
11706 static int
11707 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11708 {
11709 uint32_t op, op1_ebit, coproc, bits_24_25;
11710 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11711 struct regcache *reg_cache = arm_insn_r->regcache;
11712
11713 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11714 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11715 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11716 op = bit (arm_insn_r->arm_insn, 4);
11717 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11718
11719 /* Handle arm SWI/SVC system call instructions. */
11720 if (bits_24_25 == 0x3)
11721 {
11722 if (tdep->arm_syscall_record != NULL)
11723 {
11724 ULONGEST svc_operand, svc_number;
11725
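	  /* The SVC immediate is the low 24 bits of the insn.  Under OABI
	     the syscall number is encoded there with a 0x900000 bias; under
	     EABI the immediate is zero and the number is passed in r7.  */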
11726 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11727
11728 if (svc_operand) /* OABI. */
11729 svc_number = svc_operand - 0x900000;
11730 else /* EABI. */
11731 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11732
11733 return tdep->arm_syscall_record (reg_cache, svc_number);
11734 }
11735 else
11736 {
11737 printf_unfiltered (_("no syscall record support\n"));
11738 return -1;
11739 }
11740 }
11741 else if (bits_24_25 == 0x02)
11742 {
11743 if (op)
11744 {
11745 if ((coproc & 0x0e) == 0x0a)
11746 {
11747 /* 8, 16, and 32-bit transfer */
11748 return arm_record_vdata_transfer_insn (arm_insn_r);
11749 }
11750 else
11751 {
11752 if (op1_ebit)
11753 {
11754 /* MRC, MRC2 */
11755 uint32_t record_buf[1];
11756
11757 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11758 if (record_buf[0] == 15)
11759 record_buf[0] = ARM_PS_REGNUM;
11760
11761 arm_insn_r->reg_rec_count = 1;
11762 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11763 record_buf);
11764 return 0;
11765 }
11766 else
11767 {
11768 /* MCR, MCR2 */
11769 return -1;
11770 }
11771 }
11772 }
11773 else
11774 {
11775 if ((coproc & 0x0e) == 0x0a)
11776 {
11777 /* VFP data-processing instructions. */
11778 return arm_record_vfp_data_proc_insn (arm_insn_r);
11779 }
11780 else
11781 {
11782 /* CDP, CDP2 */
11783 return -1;
11784 }
11785 }
11786 }
11787 else
11788 {
11789 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11790
11791 if (op1 == 5)
11792 {
11793 if ((coproc & 0x0e) != 0x0a)
11794 {
11795 /* MRRC, MRRC2 */
11796 return -1;
11797 }
11798 }
11799 else if (op1 == 4 || op1 == 5)
11800 {
11801 if ((coproc & 0x0e) == 0x0a)
11802 {
11803 /* 64-bit transfers between ARM core and extension */
11804 return -1;
11805 }
11806 else if (op1 == 4)
11807 {
11808 /* MCRR, MCRR2 */
11809 return -1;
11810 }
11811 }
11812 else if (op1 == 0 || op1 == 1)
11813 {
11814 /* UNDEFINED */
11815 return -1;
11816 }
11817 else
11818 {
11819 if ((coproc & 0x0e) == 0x0a)
11820 {
11821 /* Extension register load/store */
11822 }
11823 else
11824 {
11825 /* STC, STC2, LDC, LDC2 */
11826 }
11827 return -1;
11828 }
11829 }
11830
11831 return -1;
11832 }
11833
11834 /* Handling opcode 000 insns. */
11835
11836 static int
11837 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11838 {
11839 uint32_t record_buf[8];
11840 uint32_t reg_src1 = 0;
11841
11842 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11843
11844 record_buf[0] = ARM_PS_REGNUM;
11845 record_buf[1] = reg_src1;
11846 thumb_insn_r->reg_rec_count = 2;
11847
11848 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11849
11850 return 0;
11851 }
11852
11853
11854 /* Handling opcode 001 insns. */
11855
11856 static int
11857 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11858 {
11859 uint32_t record_buf[8];
11860 uint32_t reg_src1 = 0;
11861
11862 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11863
11864 record_buf[0] = ARM_PS_REGNUM;
11865 record_buf[1] = reg_src1;
11866 thumb_insn_r->reg_rec_count = 2;
11867
11868 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11869
11870 return 0;
11871 }
11872
11873 /* Handling opcode 010 insns. */
11874
11875 static int
11876 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11877 {
11878 struct regcache *reg_cache = thumb_insn_r->regcache;
11879 uint32_t record_buf[8], record_buf_mem[8];
11880
11881 uint32_t reg_src1 = 0, reg_src2 = 0;
11882 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11883
11884 ULONGEST u_regval[2] = {0};
11885
11886 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11887
11888 if (bit (thumb_insn_r->arm_insn, 12))
11889 {
11890 /* Handle load/store register offset. */
11891 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11892
11893 if (in_inclusive_range (opB, 4U, 7U))
11894 {
11895 	  /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11896 	  reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11897 record_buf[0] = reg_src1;
11898 thumb_insn_r->reg_rec_count = 1;
11899 }
11900 else if (in_inclusive_range (opB, 0U, 2U))
11901 {
11902 	  /* STR(2), STRB(2), STRH(2). */
11903 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11904 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11905 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11906 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11907 if (0 == opB)
11908 record_buf_mem[0] = 4; /* STR (2). */
11909 else if (2 == opB)
11910 record_buf_mem[0] = 1; /* STRB (2). */
11911 else if (1 == opB)
11912 record_buf_mem[0] = 2; /* STRH (2). */
11913 record_buf_mem[1] = u_regval[0] + u_regval[1];
11914 thumb_insn_r->mem_rec_count = 1;
11915 }
11916 }
11917 else if (bit (thumb_insn_r->arm_insn, 11))
11918 {
11919 /* Handle load from literal pool. */
11920 /* LDR(3). */
11921 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11922 record_buf[0] = reg_src1;
11923 thumb_insn_r->reg_rec_count = 1;
11924 }
11925 else if (opcode1)
11926 {
11927 /* Special data instructions and branch and exchange */
11928 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11929 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11930 if ((3 == opcode2) && (!opcode3))
11931 {
11932 /* Branch with exchange. */
11933 record_buf[0] = ARM_PS_REGNUM;
11934 thumb_insn_r->reg_rec_count = 1;
11935 }
11936 else
11937 {
11938 /* Format 8; special data processing insns. */
11939 record_buf[0] = ARM_PS_REGNUM;
11940 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11941 | bits (thumb_insn_r->arm_insn, 0, 2));
11942 thumb_insn_r->reg_rec_count = 2;
11943 }
11944 }
11945 else
11946 {
11947 /* Format 5; data processing insns. */
11948 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11949 if (bit (thumb_insn_r->arm_insn, 7))
11950 {
11951 reg_src1 = reg_src1 + 8;
11952 }
11953 record_buf[0] = ARM_PS_REGNUM;
11954 record_buf[1] = reg_src1;
11955 thumb_insn_r->reg_rec_count = 2;
11956 }
11957
11958 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11959 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11960 record_buf_mem);
11961
11962 return 0;
11963 }
11964
11965 /* Handling opcode 001 insns. */
11966
11967 static int
11968 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11969 {
11970 struct regcache *reg_cache = thumb_insn_r->regcache;
11971 uint32_t record_buf[8], record_buf_mem[8];
11972
11973 uint32_t reg_src1 = 0;
11974 uint32_t opcode = 0, immed_5 = 0;
11975
11976 ULONGEST u_regval = 0;
11977
11978 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11979
11980 if (opcode)
11981 {
11982 /* LDR(1). */
11983 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11984 record_buf[0] = reg_src1;
11985 thumb_insn_r->reg_rec_count = 1;
11986 }
11987 else
11988 {
11989 /* STR(1). */
11990 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11991 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11992 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11993 record_buf_mem[0] = 4;
11994 record_buf_mem[1] = u_regval + (immed_5 * 4);
11995 thumb_insn_r->mem_rec_count = 1;
11996 }
11997
11998 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11999 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12000 record_buf_mem);
12001
12002 return 0;
12003 }
12004
12005 /* Handling opcode 100 insns. */
12006
12007 static int
12008 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12009 {
12010 struct regcache *reg_cache = thumb_insn_r->regcache;
12011 uint32_t record_buf[8], record_buf_mem[8];
12012
12013 uint32_t reg_src1 = 0;
12014 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12015
12016 ULONGEST u_regval = 0;
12017
12018 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12019
12020 if (3 == opcode)
12021 {
12022 /* LDR(4). */
12023 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12024 record_buf[0] = reg_src1;
12025 thumb_insn_r->reg_rec_count = 1;
12026 }
12027 else if (1 == opcode)
12028 {
12029 /* LDRH(1). */
12030 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12031 record_buf[0] = reg_src1;
12032 thumb_insn_r->reg_rec_count = 1;
12033 }
12034 else if (2 == opcode)
12035 {
12036 /* STR(3). */
12037 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12038 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12039 record_buf_mem[0] = 4;
12040 record_buf_mem[1] = u_regval + (immed_8 * 4);
12041 thumb_insn_r->mem_rec_count = 1;
12042 }
12043 else if (0 == opcode)
12044 {
12045 /* STRH(1). */
12046 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12047 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12048 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12049 record_buf_mem[0] = 2;
12050 record_buf_mem[1] = u_regval + (immed_5 * 2);
12051 thumb_insn_r->mem_rec_count = 1;
12052 }
12053
12054 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12055 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12056 record_buf_mem);
12057
12058 return 0;
12059 }
12060
12061 /* Handling opcode 101 insns. */
12062
12063 static int
12064 thumb_record_misc (insn_decode_record *thumb_insn_r)
12065 {
12066 struct regcache *reg_cache = thumb_insn_r->regcache;
12067
12068 uint32_t opcode = 0;
12069 uint32_t register_bits = 0, register_count = 0;
12070 uint32_t index = 0, start_address = 0;
12071 uint32_t record_buf[24], record_buf_mem[48];
12072 uint32_t reg_src1;
12073
12074 ULONGEST u_regval = 0;
12075
12076 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12077
12078 if (opcode == 0 || opcode == 1)
12079 {
12080 /* ADR and ADD (SP plus immediate) */
12081
12082 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12083 record_buf[0] = reg_src1;
12084 thumb_insn_r->reg_rec_count = 1;
12085 }
12086 else
12087 {
12088 /* Miscellaneous 16-bit instructions */
12089 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12090
12091 switch (opcode2)
12092 {
12093 case 6:
12094 /* SETEND and CPS */
12095 break;
12096 case 0:
12097 /* ADD/SUB (SP plus immediate) */
12098 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12099 record_buf[0] = ARM_SP_REGNUM;
12100 thumb_insn_r->reg_rec_count = 1;
12101 break;
12102 case 1: /* fall through */
12103 case 3: /* fall through */
12104 case 9: /* fall through */
12105 case 11:
12106 /* CBNZ, CBZ */
12107 break;
12108 case 2:
12109 /* SXTH, SXTB, UXTH, UXTB */
12110 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12111 thumb_insn_r->reg_rec_count = 1;
12112 break;
12113 case 4: /* fall through */
12114 case 5:
12115 /* PUSH. */
12116 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12117 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12118 while (register_bits)
12119 {
12120 if (register_bits & 0x00000001)
12121 register_count++;
12122 register_bits = register_bits >> 1;
12123 }
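	  /* Bit 8 is the M bit: when set, LR is pushed in addition to the
	     registers in the list, so it occupies one more slot below SP.  */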
12124 start_address = u_regval - \
12125 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12126 thumb_insn_r->mem_rec_count = register_count;
12127 while (register_count)
12128 {
12129 record_buf_mem[(register_count * 2) - 1] = start_address;
12130 record_buf_mem[(register_count * 2) - 2] = 4;
12131 start_address = start_address + 4;
12132 register_count--;
12133 }
12134 record_buf[0] = ARM_SP_REGNUM;
12135 thumb_insn_r->reg_rec_count = 1;
12136 break;
12137 case 10:
12138 /* REV, REV16, REVSH */
12139 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12140 thumb_insn_r->reg_rec_count = 1;
12141 break;
12142 case 12: /* fall through */
12143 case 13:
12144 /* POP. */
12145 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12146 while (register_bits)
12147 {
12148 if (register_bits & 0x00000001)
12149 record_buf[index++] = register_count;
12150 register_bits = register_bits >> 1;
12151 register_count++;
12152 }
12153 record_buf[index++] = ARM_PS_REGNUM;
12154 record_buf[index++] = ARM_SP_REGNUM;
12155 thumb_insn_r->reg_rec_count = index;
12156 break;
12157 case 0xe:
12158 /* BKPT insn. */
12159 /* Handle enhanced software breakpoint insn, BKPT. */
12160 	  /* The CPSR is changed so execution continues in ARM state with normal
12161 	     interrupts disabled, entering abort mode. */
12162 	  /* The PC is set according to the high vector configuration. */
12163 	  /* If the user hits the breakpoint and then reverses, we need to go back
12164 	     to the previous CPSR and program counter. */
12165 record_buf[0] = ARM_PS_REGNUM;
12166 record_buf[1] = ARM_LR_REGNUM;
12167 thumb_insn_r->reg_rec_count = 2;
12168 /* We need to save SPSR value, which is not yet done. */
12169 printf_unfiltered (_("Process record does not support instruction "
12170 "0x%0x at address %s.\n"),
12171 thumb_insn_r->arm_insn,
12172 paddress (thumb_insn_r->gdbarch,
12173 thumb_insn_r->this_addr));
12174 return -1;
12175
12176 case 0xf:
12177 /* If-Then, and hints */
12178 break;
12179 default:
12180 return -1;
12181 	}
12182 }
12183
12184 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12185 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12186 record_buf_mem);
12187
12188 return 0;
12189 }
12190
12191 /* Handling opcode 110 insns. */
12192
12193 static int
12194 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12195 {
12196 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12197 struct regcache *reg_cache = thumb_insn_r->regcache;
12198
12199   uint32_t ret = 0; /* Function return value: -1: record failure; 0: success.  */
12200 uint32_t reg_src1 = 0;
12201 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12202 uint32_t index = 0, start_address = 0;
12203 uint32_t record_buf[24], record_buf_mem[48];
12204
12205 ULONGEST u_regval = 0;
12206
12207 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12208 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12209
12210 if (1 == opcode2)
12211 {
12212
12213 /* LDMIA. */
12214 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12215 /* Get Rn. */
12216 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12217 while (register_bits)
12218 {
12219 if (register_bits & 0x00000001)
12220 record_buf[index++] = register_count;
12221 register_bits = register_bits >> 1;
12222 register_count++;
12223 }
12224 record_buf[index++] = reg_src1;
12225 thumb_insn_r->reg_rec_count = index;
12226 }
12227 else if (0 == opcode2)
12228 {
12229       /* Handle STMIA. */
12230 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12231 /* Get Rn. */
12232 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12233 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12234 while (register_bits)
12235 {
12236 if (register_bits & 0x00000001)
12237 register_count++;
12238 register_bits = register_bits >> 1;
12239 }
12240 start_address = u_regval;
12241 thumb_insn_r->mem_rec_count = register_count;
12242 while (register_count)
12243 {
12244 record_buf_mem[(register_count * 2) - 1] = start_address;
12245 record_buf_mem[(register_count * 2) - 2] = 4;
12246 start_address = start_address + 4;
12247 register_count--;
12248 }
12249 }
12250 else if (0x1F == opcode1)
12251 {
12252 /* Handle arm syscall insn. */
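      /* Bits 12:8 == 0x1F corresponds to the SVC (SWI) encoding 0xDFxx;
	 for EABI the syscall number is passed in r7.  */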
12253 if (tdep->arm_syscall_record != NULL)
12254 {
12255 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12256 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12257 }
12258 else
12259 {
12260 printf_unfiltered (_("no syscall record support\n"));
12261 return -1;
12262 }
12263 }
12264
12265   /* B(1), the conditional branch, is automatically taken care of in
12266      process_record, as PC is saved there. */
12267
12268 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12269 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12270 record_buf_mem);
12271
12272 return ret;
12273 }
12274
12275 /* Handling opcode 111 insns. */
12276
12277 static int
12278 thumb_record_branch (insn_decode_record *thumb_insn_r)
12279 {
12280 uint32_t record_buf[8];
12281 uint32_t bits_h = 0;
12282
12283 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12284
12285 if (2 == bits_h || 3 == bits_h)
12286 {
12287 /* BL */
12288 record_buf[0] = ARM_LR_REGNUM;
12289 thumb_insn_r->reg_rec_count = 1;
12290 }
12291 else if (1 == bits_h)
12292 {
12293 /* BLX(1). */
12294 record_buf[0] = ARM_PS_REGNUM;
12295 record_buf[1] = ARM_LR_REGNUM;
12296 thumb_insn_r->reg_rec_count = 2;
12297 }
12298
12299   /* B(2) is automatically taken care of in process_record, as PC is
12300      saved there. */
12301
12302 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12303
12304 return 0;
12305 }
12306
12307 /* Handler for thumb2 load/store multiple instructions. */
12308
12309 static int
12310 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12311 {
12312 struct regcache *reg_cache = thumb2_insn_r->regcache;
12313
12314 uint32_t reg_rn, op;
12315 uint32_t register_bits = 0, register_count = 0;
12316 uint32_t index = 0, start_address = 0;
12317 uint32_t record_buf[24], record_buf_mem[48];
12318
12319 ULONGEST u_regval = 0;
12320
12321 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12322 op = bits (thumb2_insn_r->arm_insn, 23, 24);
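  /* OP is insn bits 24:23: values 0 and 3 are the SRS/RFE forms, 1 is the
     increment-after form and 2 the decrement-before form of LDM/STM.  */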
12323
12324 if (0 == op || 3 == op)
12325 {
12326 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12327 {
12328 /* Handle RFE instruction. */
12329 record_buf[0] = ARM_PS_REGNUM;
12330 thumb2_insn_r->reg_rec_count = 1;
12331 }
12332 else
12333 {
12334 /* Handle SRS instruction after reading banked SP. */
12335 return arm_record_unsupported_insn (thumb2_insn_r);
12336 }
12337 }
12338 else if (1 == op || 2 == op)
12339 {
12340 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12341 {
12342 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12343 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12344 while (register_bits)
12345 {
12346 if (register_bits & 0x00000001)
12347 record_buf[index++] = register_count;
12348
12349 register_count++;
12350 register_bits = register_bits >> 1;
12351 }
12352 record_buf[index++] = reg_rn;
12353 record_buf[index++] = ARM_PS_REGNUM;
12354 thumb2_insn_r->reg_rec_count = index;
12355 }
12356 else
12357 {
12358 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12359 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12360 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12361 while (register_bits)
12362 {
12363 if (register_bits & 0x00000001)
12364 register_count++;
12365
12366 register_bits = register_bits >> 1;
12367 }
12368
12369 if (1 == op)
12370 {
12371 	      /* Start address calculation for STM/STMIA/STMEA (increment after). */
12372 start_address = u_regval;
12373 }
12374 else if (2 == op)
12375 {
12376 	      /* Start address calculation for STMDB/STMFD (decrement before). */
12377 start_address = u_regval - register_count * 4;
12378 }
12379
12380 thumb2_insn_r->mem_rec_count = register_count;
12381 while (register_count)
12382 {
12383 record_buf_mem[register_count * 2 - 1] = start_address;
12384 record_buf_mem[register_count * 2 - 2] = 4;
12385 start_address = start_address + 4;
12386 register_count--;
12387 }
12388 record_buf[0] = reg_rn;
12389 record_buf[1] = ARM_PS_REGNUM;
12390 thumb2_insn_r->reg_rec_count = 2;
12391 }
12392 }
12393
12394 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12395 record_buf_mem);
12396 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12397 record_buf);
12398 return ARM_RECORD_SUCCESS;
12399 }
12400
12401 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12402 instructions. */
12403
12404 static int
12405 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12406 {
12407 struct regcache *reg_cache = thumb2_insn_r->regcache;
12408
12409 uint32_t reg_rd, reg_rn, offset_imm;
12410 uint32_t reg_dest1, reg_dest2;
12411 uint32_t address, offset_addr;
12412 uint32_t record_buf[8], record_buf_mem[8];
12413 uint32_t op1, op2, op3;
12414
12415 ULONGEST u_regval[2];
12416
12417 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12418 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12419 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12420
12421 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12422 {
12423       if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12424 {
12425 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12426 record_buf[0] = reg_dest1;
12427 record_buf[1] = ARM_PS_REGNUM;
12428 thumb2_insn_r->reg_rec_count = 2;
12429 }
12430
12431 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12432 {
12433 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12434 record_buf[2] = reg_dest2;
12435 thumb2_insn_r->reg_rec_count = 3;
12436 }
12437 }
12438 else
12439 {
12440 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12441 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12442
12443 if (0 == op1 && 0 == op2)
12444 {
12445 /* Handle STREX. */
12446 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12447 address = u_regval[0] + (offset_imm * 4);
12448 record_buf_mem[0] = 4;
12449 record_buf_mem[1] = address;
12450 thumb2_insn_r->mem_rec_count = 1;
12451 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12452 record_buf[0] = reg_rd;
12453 thumb2_insn_r->reg_rec_count = 1;
12454 }
12455 else if (1 == op1 && 0 == op2)
12456 {
12457 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12458 record_buf[0] = reg_rd;
12459 thumb2_insn_r->reg_rec_count = 1;
12460 address = u_regval[0];
12461 record_buf_mem[1] = address;
12462
12463 if (4 == op3)
12464 {
12465 /* Handle STREXB. */
12466 record_buf_mem[0] = 1;
12467 thumb2_insn_r->mem_rec_count = 1;
12468 }
12469 else if (5 == op3)
12470 {
12471 /* Handle STREXH. */
12472 	      record_buf_mem[0] = 2;
12473 thumb2_insn_r->mem_rec_count = 1;
12474 }
12475 else if (7 == op3)
12476 {
12477 /* Handle STREXD. */
12478 address = u_regval[0];
12479 record_buf_mem[0] = 4;
12480 record_buf_mem[2] = 4;
12481 record_buf_mem[3] = address + 4;
12482 thumb2_insn_r->mem_rec_count = 2;
12483 }
12484 }
12485 else
12486 {
12487 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12488
12489 if (bit (thumb2_insn_r->arm_insn, 24))
12490 {
12491 if (bit (thumb2_insn_r->arm_insn, 23))
12492 offset_addr = u_regval[0] + (offset_imm * 4);
12493 else
12494 offset_addr = u_regval[0] - (offset_imm * 4);
12495
12496 address = offset_addr;
12497 }
12498 else
12499 address = u_regval[0];
12500
12501 record_buf_mem[0] = 4;
12502 record_buf_mem[1] = address;
12503 record_buf_mem[2] = 4;
12504 record_buf_mem[3] = address + 4;
12505 thumb2_insn_r->mem_rec_count = 2;
12506 record_buf[0] = reg_rn;
12507 thumb2_insn_r->reg_rec_count = 1;
12508 }
12509 }
12510
12511 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12512 record_buf);
12513 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12514 record_buf_mem);
12515 return ARM_RECORD_SUCCESS;
12516 }
12517
12518 /* Handler for thumb2 data processing (shift register and modified immediate)
12519 instructions. */
12520
12521 static int
12522 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12523 {
12524 uint32_t reg_rd, op;
12525 uint32_t record_buf[8];
12526
12527 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12528 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12529
12530 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12531 {
12532 record_buf[0] = ARM_PS_REGNUM;
12533 thumb2_insn_r->reg_rec_count = 1;
12534 }
12535 else
12536 {
12537 record_buf[0] = reg_rd;
12538 record_buf[1] = ARM_PS_REGNUM;
12539 thumb2_insn_r->reg_rec_count = 2;
12540 }
12541
12542 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12543 record_buf);
12544 return ARM_RECORD_SUCCESS;
12545 }
12546
12547 /* Generic handler for thumb2 instructions which affect the destination and
12548    PS registers. */
12549
12550 static int
12551 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12552 {
12553 uint32_t reg_rd;
12554 uint32_t record_buf[8];
12555
12556 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12557
12558 record_buf[0] = reg_rd;
12559 record_buf[1] = ARM_PS_REGNUM;
12560 thumb2_insn_r->reg_rec_count = 2;
12561
12562 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12563 record_buf);
12564 return ARM_RECORD_SUCCESS;
12565 }
12566
12567 /* Handler for thumb2 branch and miscellaneous control instructions. */
12568
12569 static int
12570 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12571 {
12572 uint32_t op, op1, op2;
12573 uint32_t record_buf[8];
12574
12575 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12576 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12577 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12578
12579 /* Handle MSR insn. */
12580 if (!(op1 & 0x2) && 0x38 == op)
12581 {
12582 if (!(op2 & 0x3))
12583 {
12584 /* CPSR is going to be changed. */
12585 record_buf[0] = ARM_PS_REGNUM;
12586 thumb2_insn_r->reg_rec_count = 1;
12587 }
12588 else
12589 {
12590 	  arm_record_unsupported_insn (thumb2_insn_r);
12591 return -1;
12592 }
12593 }
12594 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12595 {
12596 /* BLX. */
12597 record_buf[0] = ARM_PS_REGNUM;
12598 record_buf[1] = ARM_LR_REGNUM;
12599 thumb2_insn_r->reg_rec_count = 2;
12600 }
12601
12602 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12603 record_buf);
12604 return ARM_RECORD_SUCCESS;
12605 }
12606
12607 /* Handler for thumb2 store single data item instructions. */
12608
12609 static int
12610 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12611 {
12612 struct regcache *reg_cache = thumb2_insn_r->regcache;
12613
12614 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12615 uint32_t address, offset_addr;
12616 uint32_t record_buf[8], record_buf_mem[8];
12617 uint32_t op1, op2;
12618
12619 ULONGEST u_regval[2];
12620
12621 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12622 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12623 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12624 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12625
12626 if (bit (thumb2_insn_r->arm_insn, 23))
12627 {
12628 /* T2 encoding. */
12629 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12630 offset_addr = u_regval[0] + offset_imm;
12631 address = offset_addr;
12632 }
12633 else
12634 {
12635 /* T3 encoding. */
12636 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12637 {
12638 /* Handle STRB (register). */
12639 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12640 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12641 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12642 offset_addr = u_regval[1] << shift_imm;
12643 address = u_regval[0] + offset_addr;
12644 }
12645 else
12646 {
12647 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12648 if (bit (thumb2_insn_r->arm_insn, 10))
12649 {
12650 if (bit (thumb2_insn_r->arm_insn, 9))
12651 offset_addr = u_regval[0] + offset_imm;
12652 else
12653 offset_addr = u_regval[0] - offset_imm;
12654
12655 address = offset_addr;
12656 }
12657 else
12658 address = u_regval[0];
12659 }
12660 }
12661
12662 switch (op1)
12663 {
12664 /* Store byte instructions. */
12665 case 4:
12666 case 0:
12667 record_buf_mem[0] = 1;
12668 break;
12669 /* Store half word instructions. */
12670 case 1:
12671 case 5:
12672 record_buf_mem[0] = 2;
12673 break;
12674 /* Store word instructions. */
12675 case 2:
12676 case 6:
12677 record_buf_mem[0] = 4;
12678 break;
12679
12680 default:
12681 gdb_assert_not_reached ("no decoding pattern found");
12682 break;
12683 }
12684
12685 record_buf_mem[1] = address;
12686 thumb2_insn_r->mem_rec_count = 1;
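  /* Record the base register as well, since the indexed forms of these
     stores may write it back.  */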
12687 record_buf[0] = reg_rn;
12688 thumb2_insn_r->reg_rec_count = 1;
12689
12690 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12691 record_buf);
12692 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12693 record_buf_mem);
12694 return ARM_RECORD_SUCCESS;
12695 }
12696
12697 /* Handler for thumb2 load memory hints instructions. */
12698
12699 static int
12700 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12701 {
12702 uint32_t record_buf[8];
12703 uint32_t reg_rt, reg_rn;
12704
12705 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12706 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12707
12708 if (ARM_PC_REGNUM != reg_rt)
12709 {
12710 record_buf[0] = reg_rt;
12711 record_buf[1] = reg_rn;
12712 record_buf[2] = ARM_PS_REGNUM;
12713 thumb2_insn_r->reg_rec_count = 3;
12714
12715 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12716 record_buf);
12717 return ARM_RECORD_SUCCESS;
12718 }
12719
12720 return ARM_RECORD_FAILURE;
12721 }
12722
12723 /* Handler for thumb2 load word instructions. */
12724
12725 static int
12726 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12727 {
12728 uint32_t record_buf[8];
12729
12730 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12731 record_buf[1] = ARM_PS_REGNUM;
12732 thumb2_insn_r->reg_rec_count = 2;
12733
12734 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12735 record_buf);
12736 return ARM_RECORD_SUCCESS;
12737 }
12738
12739 /* Handler for thumb2 long multiply, long multiply accumulate, and
12740 divide instructions. */
12741
12742 static int
12743 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12744 {
12745 uint32_t opcode1 = 0, opcode2 = 0;
12746 uint32_t record_buf[8];
12747
12748 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12749 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12750
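  /* opcode1 (bits 20-22) selects the form: values 0, 2 and 4-6 are the long
     multiplies and accumulates, while SDIV and UDIV use opcode1 values 1
     and 3 respectively.  */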
12751 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12752 {
12753       /* Handle SMULL, UMULL, SMLAL.  */
12754 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12755 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12756 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12757 record_buf[2] = ARM_PS_REGNUM;
12758 thumb2_insn_r->reg_rec_count = 3;
12759 }
12760   else if (1 == opcode1 || 3 == opcode1)
12761 {
12762 /* Handle SDIV and UDIV. */
12763 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12764 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12765 record_buf[2] = ARM_PS_REGNUM;
12766 thumb2_insn_r->reg_rec_count = 3;
12767 }
12768 else
12769 return ARM_RECORD_FAILURE;
12770
12771 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12772 record_buf);
12773 return ARM_RECORD_SUCCESS;
12774 }
12775
12776 /* Record handler for thumb32 coprocessor instructions. */
12777
12778 static int
12779 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12780 {
12781 if (bit (thumb2_insn_r->arm_insn, 25))
12782 return arm_record_coproc_data_proc (thumb2_insn_r);
12783 else
12784 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12785 }
12786
12787 /* Record handler for Advanced SIMD structure load/store instructions.  */
12788
12789 static int
12790 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12791 {
12792 struct regcache *reg_cache = thumb2_insn_r->regcache;
12793 uint32_t l_bit, a_bit, b_bits;
12794 uint32_t record_buf[128], record_buf_mem[128];
12795 uint32_t reg_rn, reg_vd, address, f_elem;
12796 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12797 uint8_t f_ebytes;
12798
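  /* In this encoding l_bit (bit 21) distinguishes loads from stores, a_bit
     (bit 23) selects between the multiple-structure forms and the single
     structure (to one or all lanes) forms, and b_bits (bits 8-11) is the
     type field giving the VST1..VST4 / VLD1..VLD4 variant and register
     count.  */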
12799 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12800 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12801 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12802 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12803 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12804 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
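  /* The size field in bits 6-7 gives the element size as 1 << size bytes;
     f_elem is then the number of such elements in one 8-byte D register.  */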
12805 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12806 f_elem = 8 / f_ebytes;
12807
12808 if (!l_bit)
12809 {
12810 ULONGEST u_regval = 0;
12811 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12812 address = u_regval;
12813
12814 if (!a_bit)
12815 {
12816 /* Handle VST1. */
12817 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12818 {
12819 if (b_bits == 0x07)
12820 bf_regs = 1;
12821 else if (b_bits == 0x0a)
12822 bf_regs = 2;
12823 else if (b_bits == 0x06)
12824 bf_regs = 3;
12825 else if (b_bits == 0x02)
12826 bf_regs = 4;
12827 else
12828 bf_regs = 0;
12829
12830 for (index_r = 0; index_r < bf_regs; index_r++)
12831 {
12832 for (index_e = 0; index_e < f_elem; index_e++)
12833 {
12834 record_buf_mem[index_m++] = f_ebytes;
12835 record_buf_mem[index_m++] = address;
12836 address = address + f_ebytes;
12837 thumb2_insn_r->mem_rec_count += 1;
12838 }
12839 }
12840 }
12841 /* Handle VST2. */
12842 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12843 {
12844 if (b_bits == 0x09 || b_bits == 0x08)
12845 bf_regs = 1;
12846 else if (b_bits == 0x03)
12847 bf_regs = 2;
12848 else
12849 bf_regs = 0;
12850
12851 for (index_r = 0; index_r < bf_regs; index_r++)
12852 for (index_e = 0; index_e < f_elem; index_e++)
12853 {
12854 for (loop_t = 0; loop_t < 2; loop_t++)
12855 {
12856 record_buf_mem[index_m++] = f_ebytes;
12857 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12858 thumb2_insn_r->mem_rec_count += 1;
12859 }
12860 address = address + (2 * f_ebytes);
12861 }
12862 }
12863 /* Handle VST3. */
12864 else if ((b_bits & 0x0e) == 0x04)
12865 {
12866 for (index_e = 0; index_e < f_elem; index_e++)
12867 {
12868 for (loop_t = 0; loop_t < 3; loop_t++)
12869 {
12870 record_buf_mem[index_m++] = f_ebytes;
12871 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12872 thumb2_insn_r->mem_rec_count += 1;
12873 }
12874 address = address + (3 * f_ebytes);
12875 }
12876 }
12877 /* Handle VST4. */
12878 else if (!(b_bits & 0x0e))
12879 {
12880 for (index_e = 0; index_e < f_elem; index_e++)
12881 {
12882 for (loop_t = 0; loop_t < 4; loop_t++)
12883 {
12884 record_buf_mem[index_m++] = f_ebytes;
12885 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12886 thumb2_insn_r->mem_rec_count += 1;
12887 }
12888 address = address + (4 * f_ebytes);
12889 }
12890 }
12891 }
12892 else
12893 {
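	  /* Single structure (to one lane) stores take the element size from
	     bits 10-11 instead of bits 6-7.  */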
12894 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12895
12896 if (bft_size == 0x00)
12897 f_ebytes = 1;
12898 else if (bft_size == 0x01)
12899 f_ebytes = 2;
12900 else if (bft_size == 0x02)
12901 f_ebytes = 4;
12902 else
12903 f_ebytes = 0;
12904
12905 /* Handle VST1. */
12906 if (!(b_bits & 0x0b) || b_bits == 0x08)
12907 thumb2_insn_r->mem_rec_count = 1;
12908 /* Handle VST2. */
12909 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12910 thumb2_insn_r->mem_rec_count = 2;
12911 /* Handle VST3. */
12912 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12913 thumb2_insn_r->mem_rec_count = 3;
12914 /* Handle VST4. */
12915 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12916 thumb2_insn_r->mem_rec_count = 4;
12917
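	  /* Each memory record occupies two consecutive record_buf_mem slots:
	     the access length followed by the address, in the order MEM_ALLOC
	     expects.  */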
12918 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12919 {
12920 	      record_buf_mem[index_m * 2] = f_ebytes;
12921 	      record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12922 }
12923 }
12924 }
12925 else
12926 {
12927 if (!a_bit)
12928 {
12929 /* Handle VLD1. */
12930 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12931 thumb2_insn_r->reg_rec_count = 1;
12932 /* Handle VLD2. */
12933 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12934 thumb2_insn_r->reg_rec_count = 2;
12935 /* Handle VLD3. */
12936 else if ((b_bits & 0x0e) == 0x04)
12937 thumb2_insn_r->reg_rec_count = 3;
12938 /* Handle VLD4. */
12939 else if (!(b_bits & 0x0e))
12940 thumb2_insn_r->reg_rec_count = 4;
12941 }
12942 else
12943 {
12944 /* Handle VLD1. */
12945 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12946 thumb2_insn_r->reg_rec_count = 1;
12947 /* Handle VLD2. */
12948 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12949 thumb2_insn_r->reg_rec_count = 2;
12950 /* Handle VLD3. */
12951 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12952 thumb2_insn_r->reg_rec_count = 3;
12953 /* Handle VLD4. */
12954 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12955 thumb2_insn_r->reg_rec_count = 4;
12956
12957 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12958 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12959 }
12960 }
12961
12962 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12963 {
12964 record_buf[index_r] = reg_rn;
12965 thumb2_insn_r->reg_rec_count += 1;
12966 }
12967
12968 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12969 record_buf);
12970 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12971 record_buf_mem);
12972 return 0;
12973 }
12974
12975 /* Decodes thumb2 instruction type and invokes its record handler. */
12976
12977 static unsigned int
12978 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12979 {
12980 uint32_t op, op1, op2;
12981
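  /* op1 (bits 27-28), op2 (bits 20-26) and op (bit 15) select the major
     32-bit Thumb instruction groups, following the top-level Thumb-2
     encoding table.  */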
12982 op = bit (thumb2_insn_r->arm_insn, 15);
12983 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12984 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12985
12986 if (op1 == 0x01)
12987 {
12988       if (!(op2 & 0x64))
12989 {
12990 /* Load/store multiple instruction. */
12991 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12992 }
12993 else if ((op2 & 0x64) == 0x4)
12994 {
12995 /* Load/store (dual/exclusive) and table branch instruction. */
12996 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12997 }
12998 else if ((op2 & 0x60) == 0x20)
12999 {
13000 /* Data-processing (shifted register). */
13001 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13002 }
13003 else if (op2 & 0x40)
13004 {
13005 /* Co-processor instructions. */
13006 return thumb2_record_coproc_insn (thumb2_insn_r);
13007 }
13008 }
13009 else if (op1 == 0x02)
13010 {
13011 if (op)
13012 {
13013 /* Branches and miscellaneous control instructions. */
13014 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13015 }
13016 else if (op2 & 0x20)
13017 {
13018 /* Data-processing (plain binary immediate) instruction. */
13019 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13020 }
13021 else
13022 {
13023 /* Data-processing (modified immediate). */
13024 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13025 }
13026 }
13027 else if (op1 == 0x03)
13028 {
13029       if (!(op2 & 0x71))
13030 {
13031 /* Store single data item. */
13032 return thumb2_record_str_single_data (thumb2_insn_r);
13033 }
13034 else if (!((op2 & 0x71) ^ 0x10))
13035 {
13036 /* Advanced SIMD or structure load/store instructions. */
13037 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13038 }
13039 else if (!((op2 & 0x67) ^ 0x01))
13040 {
13041 /* Load byte, memory hints instruction. */
13042 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13043 }
13044 else if (!((op2 & 0x67) ^ 0x03))
13045 {
13046 /* Load halfword, memory hints instruction. */
13047 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13048 }
13049 else if (!((op2 & 0x67) ^ 0x05))
13050 {
13051 /* Load word instruction. */
13052 return thumb2_record_ld_word (thumb2_insn_r);
13053 }
13054 else if (!((op2 & 0x70) ^ 0x20))
13055 {
13056 /* Data-processing (register) instruction. */
13057 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13058 }
13059 else if (!((op2 & 0x78) ^ 0x30))
13060 {
13061 /* Multiply, multiply accumulate, abs diff instruction. */
13062 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13063 }
13064 else if (!((op2 & 0x78) ^ 0x38))
13065 {
13066 /* Long multiply, long multiply accumulate, and divide. */
13067 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13068 }
13069 else if (op2 & 0x40)
13070 {
13071 /* Co-processor instructions. */
13072 return thumb2_record_coproc_insn (thumb2_insn_r);
13073 }
13074 }
13075
13076 return -1;
13077 }
13078
13079 namespace {
13080 /* Abstract memory reader.  Lets the record decoder read instructions from the live target or, in the selftests below, from a canned buffer.  */
13081
13082 class abstract_memory_reader
13083 {
13084 public:
13085 /* Read LEN bytes of target memory at address MEMADDR, placing the
13086 results in GDB's memory at BUF. Return true on success. */
13087
13088 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13089 };
13090
13091 /* Instruction reader from real target. */
13092
13093 class instruction_reader : public abstract_memory_reader
13094 {
13095 public:
13096 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13097 {
13098 if (target_read_memory (memaddr, buf, len))
13099 return false;
13100 else
13101 return true;
13102 }
13103 };
13104
13105 } // namespace
13106
13107 /* Extract an arm/thumb/thumb2 instruction of INSN_SIZE bytes.  Return 0 on
13108    success and a positive value on failure.  */
13109
13110 static int
13111 extract_arm_insn (abstract_memory_reader& reader,
13112 insn_decode_record *insn_record, uint32_t insn_size)
13113 {
13114 gdb_byte buf[insn_size];
13115
13116 memset (&buf[0], 0, insn_size);
13117
13118 if (!reader.read (insn_record->this_addr, buf, insn_size))
13119 return 1;
13120 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13121 insn_size,
13122 gdbarch_byte_order_for_code (insn_record->gdbarch));
13123 return 0;
13124 }
13125
13126 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13127
13128 /* Decode an arm/thumb instruction depending on its condition codes and
13129    opcodes, and dispatch it to the matching record handler.  */
13130
13131 static int
13132 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13133 record_type_t record_type, uint32_t insn_size)
13134 {
13135
13136   /* Bits 25, 26 and 27 (counting from 0) decode the type of an arm
13137      instruction.  */
13138 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13139 {
13140 arm_record_data_proc_misc_ld_str, /* 000. */
13141 arm_record_data_proc_imm, /* 001. */
13142 arm_record_ld_st_imm_offset, /* 010. */
13143 arm_record_ld_st_reg_offset, /* 011. */
13144 arm_record_ld_st_multiple, /* 100. */
13145 arm_record_b_bl, /* 101. */
13146 arm_record_asimd_vfp_coproc, /* 110. */
13147 arm_record_coproc_data_proc /* 111. */
13148 };
13149
13150   /* Bits 13, 14 and 15 (counting from 0) decode the type of a thumb
13151      instruction.  */
13152 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13153   {
13154 thumb_record_shift_add_sub, /* 000. */
13155 thumb_record_add_sub_cmp_mov, /* 001. */
13156 thumb_record_ld_st_reg_offset, /* 010. */
13157 thumb_record_ld_st_imm_offset, /* 011. */
13158 thumb_record_ld_st_stack, /* 100. */
13159 thumb_record_misc, /* 101. */
13160 thumb_record_ldm_stm_swi, /* 110. */
13161 thumb_record_branch /* 111. */
13162 };
13163
13164   int ret = 0;                /* Return value: -1 on failure, 0 on success.  */
13165 uint32_t insn_id = 0;
13166
13167 if (extract_arm_insn (reader, arm_record, insn_size))
13168 {
13169 if (record_debug)
13170 {
13171 printf_unfiltered (_("Process record: error reading memory at "
13172 "addr %s len = %d.\n"),
13173 paddress (arm_record->gdbarch,
13174 arm_record->this_addr), insn_size);
13175 }
13176 return -1;
13177 }
13178 else if (ARM_RECORD == record_type)
13179 {
13180 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13181 insn_id = bits (arm_record->arm_insn, 25, 27);
13182
13183 if (arm_record->cond == 0xf)
13184 ret = arm_record_extension_space (arm_record);
13185 else
13186 {
13187 /* If this insn has fallen into extension space
13188 then we need not decode it anymore. */
13189 ret = arm_handle_insn[insn_id] (arm_record);
13190 }
13191 if (ret != ARM_RECORD_SUCCESS)
13192 {
13193 arm_record_unsupported_insn (arm_record);
13194 ret = -1;
13195 }
13196 }
13197 else if (THUMB_RECORD == record_type)
13198 {
13199       /* Thumb instructions do not have condition codes, so set COND to -1.  */
13200 arm_record->cond = -1;
13201 insn_id = bits (arm_record->arm_insn, 13, 15);
13202 ret = thumb_handle_insn[insn_id] (arm_record);
13203 if (ret != ARM_RECORD_SUCCESS)
13204 {
13205 arm_record_unsupported_insn (arm_record);
13206 ret = -1;
13207 }
13208 }
13209 else if (THUMB2_RECORD == record_type)
13210 {
13211       /* Thumb instructions do not have condition codes, so set COND to -1.  */
13212 arm_record->cond = -1;
13213
13214 /* Swap first half of 32bit thumb instruction with second half. */
13215 arm_record->arm_insn
13216 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13217
13218 ret = thumb2_record_decode_insn_handler (arm_record);
13219
13220 if (ret != ARM_RECORD_SUCCESS)
13221 {
13222 arm_record_unsupported_insn (arm_record);
13223 ret = -1;
13224 }
13225 }
13226 else
13227 {
13228       /* Unknown record type; this should never be reached.  */
13229 gdb_assert_not_reached ("not a valid instruction, could not decode");
13230 }
13231
13232 return ret;
13233 }
13234
13235 #if GDB_SELF_TEST
13236 namespace selftests {
13237
13238 /* Provide both 16-bit and 32-bit thumb instructions. */
13239
13240 class instruction_reader_thumb : public abstract_memory_reader
13241 {
13242 public:
13243 template<size_t SIZE>
13244 instruction_reader_thumb (enum bfd_endian endian,
13245 const uint16_t (&insns)[SIZE])
13246 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13247 {}
13248
13249 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13250 {
13251 SELF_CHECK (len == 4 || len == 2);
13252 SELF_CHECK (memaddr % 2 == 0);
13253 SELF_CHECK ((memaddr / 2) < m_insns_size);
13254
13255 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13256 if (len == 4)
13257 {
13258 store_unsigned_integer (&buf[2], 2, m_endian,
13259 m_insns[memaddr / 2 + 1]);
13260 }
13261 return true;
13262 }
13263
13264 private:
13265 enum bfd_endian m_endian;
13266 const uint16_t *m_insns;
13267 size_t m_insns_size;
13268 };
13269
13270 static void
13271 arm_record_test (void)
13272 {
13273 struct gdbarch_info info;
13274 gdbarch_info_init (&info);
13275 info.bfd_arch_info = bfd_scan_arch ("arm");
13276
13277 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13278
13279 SELF_CHECK (gdbarch != NULL);
13280
13281 /* 16-bit Thumb instructions. */
13282 {
13283 insn_decode_record arm_record;
13284
13285 memset (&arm_record, 0, sizeof (insn_decode_record));
13286 arm_record.gdbarch = gdbarch;
13287
13288 static const uint16_t insns[] = {
13289 /* db b2 uxtb r3, r3 */
13290 0xb2db,
13291 /* cd 58 ldr r5, [r1, r3] */
13292 0x58cd,
13293 };
13294
13295 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13296 instruction_reader_thumb reader (endian, insns);
13297 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13298 THUMB_INSN_SIZE_BYTES);
13299
13300 SELF_CHECK (ret == 0);
13301 SELF_CHECK (arm_record.mem_rec_count == 0);
13302 SELF_CHECK (arm_record.reg_rec_count == 1);
13303 SELF_CHECK (arm_record.arm_regs[0] == 3);
13304
13305 arm_record.this_addr += 2;
13306 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13307 THUMB_INSN_SIZE_BYTES);
13308
13309 SELF_CHECK (ret == 0);
13310 SELF_CHECK (arm_record.mem_rec_count == 0);
13311 SELF_CHECK (arm_record.reg_rec_count == 1);
13312 SELF_CHECK (arm_record.arm_regs[0] == 5);
13313 }
13314
13315 /* 32-bit Thumb-2 instructions. */
13316 {
13317 insn_decode_record arm_record;
13318
13319 memset (&arm_record, 0, sizeof (insn_decode_record));
13320 arm_record.gdbarch = gdbarch;
13321
13322 static const uint16_t insns[] = {
13323 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13324 0xee1d, 0x7f70,
13325 };
13326
13327 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13328 instruction_reader_thumb reader (endian, insns);
13329 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13330 THUMB2_INSN_SIZE_BYTES);
13331
13332 SELF_CHECK (ret == 0);
13333 SELF_CHECK (arm_record.mem_rec_count == 0);
13334 SELF_CHECK (arm_record.reg_rec_count == 1);
13335 SELF_CHECK (arm_record.arm_regs[0] == 7);
13336 }
13337 }
13338 } // namespace selftests
13339 #endif /* GDB_SELF_TEST */
13340
13341 /* Free the register and memory lists allocated while recording an instruction.  */
13342
13343 static void
13344 deallocate_reg_mem (insn_decode_record *record)
13345 {
13346 xfree (record->arm_regs);
13347 xfree (record->arm_mems);
13348 }
13349
13350
13351 /* Parse the current instruction and record the values of the registers and
13352    memory that will be changed by it into "record_arch_list".  Return -1 if
13353    something goes wrong.  */
13354
13355 int
13356 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13357 CORE_ADDR insn_addr)
13358 {
13359
13360 uint32_t no_of_rec = 0;
13361   int ret = 0;  /* Return value: -1 on record failure, 0 on success.  */
13362 ULONGEST t_bit = 0, insn_id = 0;
13363
13364 ULONGEST u_regval = 0;
13365
13366 insn_decode_record arm_record;
13367
13368 memset (&arm_record, 0, sizeof (insn_decode_record));
13369 arm_record.regcache = regcache;
13370 arm_record.this_addr = insn_addr;
13371 arm_record.gdbarch = gdbarch;
13372
13373
13374 if (record_debug > 1)
13375 {
13376 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13377 "addr = %s\n",
13378 paddress (gdbarch, arm_record.this_addr));
13379 }
13380
13381 instruction_reader reader;
13382 if (extract_arm_insn (reader, &arm_record, 2))
13383 {
13384 if (record_debug)
13385 {
13386 printf_unfiltered (_("Process record: error reading memory at "
13387 "addr %s len = %d.\n"),
13388 paddress (arm_record.gdbarch,
13389 arm_record.this_addr), 2);
13390 }
13391 return -1;
13392 }
13393
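  /* Only the first halfword has been read so far; it is used below just to
     classify the instruction, and decode_insn re-reads the instruction with
     the correct size.  */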
13394   /* Check whether the instruction is a thumb or an arm one.  */
13395
13396 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13397 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13398
13399
13400 if (!(u_regval & t_bit))
13401 {
13402 /* We are decoding arm insn. */
13403 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13404 }
13405 else
13406 {
13407 insn_id = bits (arm_record.arm_insn, 11, 15);
13408       /* Is it a thumb2 instruction?  Bits 11-15 of the first halfword are 0x1D, 0x1E or 0x1F for a 32-bit thumb2 instruction.  */
13409 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13410 {
13411 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13412 THUMB2_INSN_SIZE_BYTES);
13413 }
13414 else
13415 {
13416 /* We are decoding thumb insn. */
13417 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13418 THUMB_INSN_SIZE_BYTES);
13419 }
13420 }
13421
13422 if (0 == ret)
13423 {
13424 /* Record registers. */
13425 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13426 if (arm_record.arm_regs)
13427 {
13428 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13429 {
13430 if (record_full_arch_list_add_reg
13431 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13432 ret = -1;
13433 }
13434 }
13435 /* Record memories. */
13436 if (arm_record.arm_mems)
13437 {
13438 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13439 {
13440 if (record_full_arch_list_add_mem
13441 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13442 arm_record.arm_mems[no_of_rec].len))
13443 ret = -1;
13444 }
13445 }
13446
13447 if (record_full_arch_list_add_end ())
13448 ret = -1;
13449 }
13450
13451
13452 deallocate_reg_mem (&arm_record);
13453
13454 return ret;
13455 }
13456
13457 /* See arm-tdep.h. */
13458
13459 const target_desc *
13460 arm_read_description (arm_fp_type fp_type)
13461 {
13462 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13463
13464 if (tdesc == nullptr)
13465 {
13466 tdesc = arm_create_target_description (fp_type);
13467 tdesc_arm_list[fp_type] = tdesc;
13468 }
13469
13470 return tdesc;
13471 }
13472
13473 /* See arm-tdep.h. */
13474
13475 const target_desc *
13476 arm_read_mprofile_description (arm_m_profile_type m_type)
13477 {
13478 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13479
13480 if (tdesc == nullptr)
13481 {
13482 tdesc = arm_create_mprofile_target_description (m_type);
13483 tdesc_arm_mprofile_list[m_type] = tdesc;
13484 }
13485
13486 return tdesc;
13487 }