Fix a few comments in maint-test-settings.c
[binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "common/vec.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
71
72 #if GDB_SELF_TEST
73 #include "common/selftest.h"
74 #endif
75
/* When non-zero (enabled via "set debug arm"), print ARM-specific
   debugging output; see uses guarded by `if (arm_debug)' below.  */
static int arm_debug;
77
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)
90
/* One ARM mapping symbol ($a, $d or $t).  VALUE is the symbol's
   section-relative address; TYPE is the character following the '$'
   (e.g. 't' marks the start of Thumb code — see arm_pc_is_thumb).  */

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;

  /* Order by address, so a per-section vector of these can be sorted
     and binary-searched with std::lower_bound.  */
  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};

typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
101
/* Per-objfile storage for ARM mapping symbols, attached to each
   objfile via arm_objfile_data_key below.  */

struct arm_per_objfile
{
  /* Allocate one (initially empty, unsorted) map per BFD section.  */
  explicit arm_per_objfile (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_objfile);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols, there is one
     vector for each section of the objfile (the array is indexed by BFD
     section index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  Sorting is deferred until first lookup; see
     arm_find_mapping_symbol.  */
  std::unique_ptr<bool[]> section_maps_sorted;
};

/* Per-objfile data used for mapping symbols.  */
static objfile_key<arm_per_objfile> arm_objfile_data_key;
128
/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
{
  "auto",
  "arm",
  "thumb",
  NULL
};

/* FALLBACK is consulted only when no other evidence decides the mode;
   FORCE overrides the symbol table entirely.  See arm_pc_is_thumb.  */
static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";
173
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};
224
/* Canonical names for the core registers, indexed by GDB register
   number (trailing comments give the indices).  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */
233
/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (const char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,
					  const char *);

/* Forward declarations for the pseudo-register accessors defined
   later in this file.  */
static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						readable_regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};
270
/* Cached results of scanning a function prologue, used by the frame
   unwinders below.  */

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

/* Forward declaration; scans an ARM-mode prologue, optionally filling
   CACHE.  Defined later in this file.  */
static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);
295
/* Architecture version for displaced stepping.  This affects the
   behaviour of certain instructions, and really should not be
   hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;
304
305 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
306
307 int
308 arm_psr_thumb_bit (struct gdbarch *gdbarch)
309 {
310 if (gdbarch_tdep (gdbarch)->is_m)
311 return XPSR_T;
312 else
313 return CPSR_T;
314 }
315
316 /* Determine if the processor is currently executing in Thumb mode. */
317
318 int
319 arm_is_thumb (struct regcache *regcache)
320 {
321 ULONGEST cpsr;
322 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
323
324 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
325
326 return (cpsr & t_bit) != 0;
327 }
328
329 /* Determine if FRAME is executing in Thumb mode. */
330
331 int
332 arm_frame_is_thumb (struct frame_info *frame)
333 {
334 CORE_ADDR cpsr;
335 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
336
337 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
338 directly (from a signal frame or dummy frame) or by interpreting
339 the saved LR (from a prologue or DWARF frame). So consult it and
340 trust the unwinders. */
341 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
342
343 return (cpsr & t_bit) != 0;
344 }
345
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  /* Mapping symbol values are section-relative, so make the
	     search key section-relative too.  */
	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  /* Note: on an empty map IT == begin () == end (), so both
	     branches are skipped and we fall through to return 0.  */
	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  /* No mapping symbol covers MEMADDR.  */
  return 0;
}
406
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   The sources of information below are consulted in order; the first
   one that gives an answer wins.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure * )
	get_displaced_step_closure_by_addr (memaddr));

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
473
474 /* Determine if the address specified equals any of these magic return
475 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
476 architectures.
477
478 From ARMv6-M Reference Manual B1.5.8
479 Table B1-5 Exception return behavior
480
481 EXC_RETURN Return To Return Stack
482 0xFFFFFFF1 Handler mode Main
483 0xFFFFFFF9 Thread mode Main
484 0xFFFFFFFD Thread mode Process
485
486 From ARMv7-M Reference Manual B1.5.8
487 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
488
489 EXC_RETURN Return To Return Stack
490 0xFFFFFFF1 Handler mode Main
491 0xFFFFFFF9 Thread mode Main
492 0xFFFFFFFD Thread mode Process
493
494 Table B1-9 EXC_RETURN definition of exception return behavior, with
495 FP
496
497 EXC_RETURN Return To Return Stack Frame Type
498 0xFFFFFFE1 Handler mode Main Extended
499 0xFFFFFFE9 Thread mode Main Extended
500 0xFFFFFFED Thread mode Process Extended
501 0xFFFFFFF1 Handler mode Main Basic
502 0xFFFFFFF9 Thread mode Main Basic
503 0xFFFFFFFD Thread mode Process Basic
504
505 For more details see "B1.5.8 Exception return behavior"
506 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
507
508 static int
509 arm_m_addr_is_magic (CORE_ADDR addr)
510 {
511 switch (addr)
512 {
513 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
514 the exception return behavior. */
515 case 0xffffffe1:
516 case 0xffffffe9:
517 case 0xffffffed:
518 case 0xfffffff1:
519 case 0xfffffff9:
520 case 0xfffffffd:
521 /* Address is magic. */
522 return 1;
523
524 default:
525 /* Address is not magic. */
526 return 0;
527 }
528 }
529
530 /* Remove useless bits from addresses in a running program. */
531 static CORE_ADDR
532 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
533 {
534 /* On M-profile devices, do not strip the low bit from EXC_RETURN
535 (the magic exception return address). */
536 if (gdbarch_tdep (gdbarch)->is_m
537 && arm_m_addr_is_magic (val))
538 return val;
539
540 if (arm_apcs_32)
541 return UNMAKE_THUMB_ADDR (val);
542 else
543 return (val & 0x03fffffc);
544 }
545
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the linker-added "__" prefix so the
	 startswith tests below see the original symbol name.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  /* Not a recognized helper; don't skip it.  */
  return 0;
}
599
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The 16-bit immediate is assembled as imm4:i:imm3:imm8.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The 16-bit immediate is assembled as
   imm4:imm12.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
614
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit encoded immediate i:imm3:imm8; the result is the
   expanded 32-bit constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;
  unsigned int byte = imm & 0xff;

  if (count < 8)
    {
      /* Top bits 00xx: the low byte replicated into one, two or all
	 four byte lanes of the result, selected by bits 9:8.  */
      switch (count >> 1)
	{
	case 0:
	  return byte;
	case 1:
	  return byte | (byte << 16);
	case 2:
	  return (byte << 8) | (byte << 24);
	default:
	  return byte | (byte << 8) | (byte << 16) | (byte << 24);
	}
    }

  /* Otherwise: an 8-bit constant 1bcdefgh rotated right by COUNT,
     which is the same as shifting it left by 32 - COUNT.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
638
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;

  return 0;
}
649
650 /* Analyze a Thumb prologue, looking for a recognizable stack frame
651 and frame pointer. Scan until we encounter a store that could
652 clobber the stack frame unexpectedly, or an unknown instruction.
653 Return the last address which is definitely safe to skip for an
654 initial breakpoint. */
655
656 static CORE_ADDR
657 thumb_analyze_prologue (struct gdbarch *gdbarch,
658 CORE_ADDR start, CORE_ADDR limit,
659 struct arm_prologue_cache *cache)
660 {
661 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
662 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
663 int i;
664 pv_t regs[16];
665 CORE_ADDR offset;
666 CORE_ADDR unrecognized_pc = 0;
667
668 for (i = 0; i < 16; i++)
669 regs[i] = pv_register (i, 0);
670 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
671
672 while (start < limit)
673 {
674 unsigned short insn;
675
676 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
677
678 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
679 {
680 int regno;
681 int mask;
682
683 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
684 break;
685
686 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
687 whether to save LR (R14). */
688 mask = (insn & 0xff) | ((insn & 0x100) << 6);
689
690 /* Calculate offsets of saved R0-R7 and LR. */
691 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
692 if (mask & (1 << regno))
693 {
694 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
695 -4);
696 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
697 }
698 }
699 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
700 {
701 offset = (insn & 0x7f) << 2; /* get scaled offset */
702 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
703 -offset);
704 }
705 else if (thumb_instruction_restores_sp (insn))
706 {
707 /* Don't scan past the epilogue. */
708 break;
709 }
710 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
711 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
712 (insn & 0xff) << 2);
713 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
714 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
715 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
716 bits (insn, 6, 8));
717 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
718 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
719 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
720 bits (insn, 0, 7));
721 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
722 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
723 && pv_is_constant (regs[bits (insn, 3, 5)]))
724 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
725 regs[bits (insn, 6, 8)]);
726 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
727 && pv_is_constant (regs[bits (insn, 3, 6)]))
728 {
729 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
730 int rm = bits (insn, 3, 6);
731 regs[rd] = pv_add (regs[rd], regs[rm]);
732 }
733 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
734 {
735 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
736 int src_reg = (insn & 0x78) >> 3;
737 regs[dst_reg] = regs[src_reg];
738 }
739 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
740 {
741 /* Handle stores to the stack. Normally pushes are used,
742 but with GCC -mtpcs-frame, there may be other stores
743 in the prologue to create the frame. */
744 int regno = (insn >> 8) & 0x7;
745 pv_t addr;
746
747 offset = (insn & 0xff) << 2;
748 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
749
750 if (stack.store_would_trash (addr))
751 break;
752
753 stack.store (addr, 4, regs[regno]);
754 }
755 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
756 {
757 int rd = bits (insn, 0, 2);
758 int rn = bits (insn, 3, 5);
759 pv_t addr;
760
761 offset = bits (insn, 6, 10) << 2;
762 addr = pv_add_constant (regs[rn], offset);
763
764 if (stack.store_would_trash (addr))
765 break;
766
767 stack.store (addr, 4, regs[rd]);
768 }
769 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
770 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
771 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
772 /* Ignore stores of argument registers to the stack. */
773 ;
774 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
775 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
776 /* Ignore block loads from the stack, potentially copying
777 parameters from memory. */
778 ;
779 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
780 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
781 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
782 /* Similarly ignore single loads from the stack. */
783 ;
784 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
785 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
786 /* Skip register copies, i.e. saves to another register
787 instead of the stack. */
788 ;
789 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
790 /* Recognize constant loads; even with small stacks these are necessary
791 on Thumb. */
792 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
793 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
794 {
795 /* Constant pool loads, for the same reason. */
796 unsigned int constant;
797 CORE_ADDR loc;
798
799 loc = start + 4 + bits (insn, 0, 7) * 4;
800 constant = read_memory_unsigned_integer (loc, 4, byte_order);
801 regs[bits (insn, 8, 10)] = pv_constant (constant);
802 }
803 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
804 {
805 unsigned short inst2;
806
807 inst2 = read_code_unsigned_integer (start + 2, 2,
808 byte_order_for_code);
809
810 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
811 {
812 /* BL, BLX. Allow some special function calls when
813 skipping the prologue; GCC generates these before
814 storing arguments to the stack. */
815 CORE_ADDR nextpc;
816 int j1, j2, imm1, imm2;
817
818 imm1 = sbits (insn, 0, 10);
819 imm2 = bits (inst2, 0, 10);
820 j1 = bit (inst2, 13);
821 j2 = bit (inst2, 11);
822
823 offset = ((imm1 << 12) + (imm2 << 1));
824 offset ^= ((!j2) << 22) | ((!j1) << 23);
825
826 nextpc = start + 4 + offset;
827 /* For BLX make sure to clear the low bits. */
828 if (bit (inst2, 12) == 0)
829 nextpc = nextpc & 0xfffffffc;
830
831 if (!skip_prologue_function (gdbarch, nextpc,
832 bit (inst2, 12) != 0))
833 break;
834 }
835
836 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
837 { registers } */
838 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
839 {
840 pv_t addr = regs[bits (insn, 0, 3)];
841 int regno;
842
843 if (stack.store_would_trash (addr))
844 break;
845
846 /* Calculate offsets of saved registers. */
847 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
848 if (inst2 & (1 << regno))
849 {
850 addr = pv_add_constant (addr, -4);
851 stack.store (addr, 4, regs[regno]);
852 }
853
854 if (insn & 0x0020)
855 regs[bits (insn, 0, 3)] = addr;
856 }
857
858 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
859 [Rn, #+/-imm]{!} */
860 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
861 {
862 int regno1 = bits (inst2, 12, 15);
863 int regno2 = bits (inst2, 8, 11);
864 pv_t addr = regs[bits (insn, 0, 3)];
865
866 offset = inst2 & 0xff;
867 if (insn & 0x0080)
868 addr = pv_add_constant (addr, offset);
869 else
870 addr = pv_add_constant (addr, -offset);
871
872 if (stack.store_would_trash (addr))
873 break;
874
875 stack.store (addr, 4, regs[regno1]);
876 stack.store (pv_add_constant (addr, 4),
877 4, regs[regno2]);
878
879 if (insn & 0x0020)
880 regs[bits (insn, 0, 3)] = addr;
881 }
882
883 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
884 && (inst2 & 0x0c00) == 0x0c00
885 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
886 {
887 int regno = bits (inst2, 12, 15);
888 pv_t addr = regs[bits (insn, 0, 3)];
889
890 offset = inst2 & 0xff;
891 if (inst2 & 0x0200)
892 addr = pv_add_constant (addr, offset);
893 else
894 addr = pv_add_constant (addr, -offset);
895
896 if (stack.store_would_trash (addr))
897 break;
898
899 stack.store (addr, 4, regs[regno]);
900
901 if (inst2 & 0x0100)
902 regs[bits (insn, 0, 3)] = addr;
903 }
904
905 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
906 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
907 {
908 int regno = bits (inst2, 12, 15);
909 pv_t addr;
910
911 offset = inst2 & 0xfff;
912 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
913
914 if (stack.store_would_trash (addr))
915 break;
916
917 stack.store (addr, 4, regs[regno]);
918 }
919
920 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
921 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
922 /* Ignore stores of argument registers to the stack. */
923 ;
924
925 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
926 && (inst2 & 0x0d00) == 0x0c00
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Ignore stores of argument registers to the stack. */
929 ;
930
931 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
932 { registers } */
933 && (inst2 & 0x8000) == 0x0000
934 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
935 /* Ignore block loads from the stack, potentially copying
936 parameters from memory. */
937 ;
938
939 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
940 [Rn, #+/-imm] */
941 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
942 /* Similarly ignore dual loads from the stack. */
943 ;
944
945 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
946 && (inst2 & 0x0d00) == 0x0c00
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 /* Similarly ignore single loads from the stack. */
949 ;
950
951 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
952 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
953 /* Similarly ignore single loads from the stack. */
954 ;
955
956 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
957 && (inst2 & 0x8000) == 0x0000)
958 {
959 unsigned int imm = ((bits (insn, 10, 10) << 11)
960 | (bits (inst2, 12, 14) << 8)
961 | bits (inst2, 0, 7));
962
963 regs[bits (inst2, 8, 11)]
964 = pv_add_constant (regs[bits (insn, 0, 3)],
965 thumb_expand_immediate (imm));
966 }
967
968 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
969 && (inst2 & 0x8000) == 0x0000)
970 {
971 unsigned int imm = ((bits (insn, 10, 10) << 11)
972 | (bits (inst2, 12, 14) << 8)
973 | bits (inst2, 0, 7));
974
975 regs[bits (inst2, 8, 11)]
976 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
977 }
978
979 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
980 && (inst2 & 0x8000) == 0x0000)
981 {
982 unsigned int imm = ((bits (insn, 10, 10) << 11)
983 | (bits (inst2, 12, 14) << 8)
984 | bits (inst2, 0, 7));
985
986 regs[bits (inst2, 8, 11)]
987 = pv_add_constant (regs[bits (insn, 0, 3)],
988 - (CORE_ADDR) thumb_expand_immediate (imm));
989 }
990
991 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
992 && (inst2 & 0x8000) == 0x0000)
993 {
994 unsigned int imm = ((bits (insn, 10, 10) << 11)
995 | (bits (inst2, 12, 14) << 8)
996 | bits (inst2, 0, 7));
997
998 regs[bits (inst2, 8, 11)]
999 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1000 }
1001
1002 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1003 {
1004 unsigned int imm = ((bits (insn, 10, 10) << 11)
1005 | (bits (inst2, 12, 14) << 8)
1006 | bits (inst2, 0, 7));
1007
1008 regs[bits (inst2, 8, 11)]
1009 = pv_constant (thumb_expand_immediate (imm));
1010 }
1011
1012 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1013 {
1014 unsigned int imm
1015 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1016
1017 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1018 }
1019
1020 else if (insn == 0xea5f /* mov.w Rd,Rm */
1021 && (inst2 & 0xf0f0) == 0)
1022 {
1023 int dst_reg = (inst2 & 0x0f00) >> 8;
1024 int src_reg = inst2 & 0xf;
1025 regs[dst_reg] = regs[src_reg];
1026 }
1027
1028 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1029 {
1030 /* Constant pool loads. */
1031 unsigned int constant;
1032 CORE_ADDR loc;
1033
1034 offset = bits (inst2, 0, 11);
1035 if (insn & 0x0080)
1036 loc = start + 4 + offset;
1037 else
1038 loc = start + 4 - offset;
1039
1040 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1041 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1042 }
1043
1044 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1045 {
1046 /* Constant pool loads. */
1047 unsigned int constant;
1048 CORE_ADDR loc;
1049
1050 offset = bits (inst2, 0, 7) << 2;
1051 if (insn & 0x0080)
1052 loc = start + 4 + offset;
1053 else
1054 loc = start + 4 - offset;
1055
1056 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1057 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1058
1059 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1060 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1061 }
1062
1063 else if (thumb2_instruction_changes_pc (insn, inst2))
1064 {
1065 /* Don't scan past anything that might change control flow. */
1066 break;
1067 }
1068 else
1069 {
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1073 }
1074
1075 start += 2;
1076 }
1077 else if (thumb_instruction_changes_pc (insn))
1078 {
1079 /* Don't scan past anything that might change control flow. */
1080 break;
1081 }
1082 else
1083 {
1084 /* The optimizer might shove anything into the prologue,
1085 so we just skip what we don't recognize. */
1086 unrecognized_pc = start;
1087 }
1088
1089 start += 2;
1090 }
1091
1092 if (arm_debug)
1093 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1094 paddress (gdbarch, start));
1095
1096 if (unrecognized_pc == 0)
1097 unrecognized_pc = start;
1098
1099 if (cache == NULL)
1100 return unrecognized_pc;
1101
1102 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1103 {
1104 /* Frame pointer is fp. Frame size is constant. */
1105 cache->framereg = ARM_FP_REGNUM;
1106 cache->framesize = -regs[ARM_FP_REGNUM].k;
1107 }
1108 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1109 {
1110 /* Frame pointer is r7. Frame size is constant. */
1111 cache->framereg = THUMB_FP_REGNUM;
1112 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1113 }
1114 else
1115 {
1116 /* Try the stack pointer... this is a bit desperate. */
1117 cache->framereg = ARM_SP_REGNUM;
1118 cache->framesize = -regs[ARM_SP_REGNUM].k;
1119 }
1120
1121 for (i = 0; i < 16; i++)
1122 if (stack.find_reg (gdbarch, i, &offset))
1123 cache->saved_regs[i].addr = offset;
1124
1125 return unrecognized_pc;
1126 }
1127
1128
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  If the instructions are recognized, return the
   address of the __stack_chk_guard variable itself (read from the
   literal pool, or assembled by a movw/movt pair), set the destination
   register number in *DESTREG, and set the size in bytes of the
   loading instruction sequence in *OFFSET.  Return 0 if the
   instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 unless one of the recognized sequences matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  /* The literal pool slot is addressed relative to the
	     4-byte-aligned PC plus 4.  */
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* A movw is only accepted when followed by the matching
	     movt supplying the upper half of the address.  */
	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  /* In ARM mode the PC reads as the instruction address + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1212
1213 /* Try to skip a sequence of instructions used for stack protector. If PC
1214 points to the first instruction of this sequence, return the address of
1215 first instruction after this sequence, otherwise, return original PC.
1216
1217 On arm, this sequence of instructions is composed of mainly three steps,
1218 Step 1: load symbol __stack_chk_guard,
1219 Step 2: load from address of __stack_chk_guard,
1220 Step 3: store it to somewhere else.
1221
1222 Usually, instructions on step 2 and step 3 are the same on various ARM
1223 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1224 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1225 instructions in step 1 vary from different ARM architectures. On ARMv7,
1226 they are,
1227
1228 movw Rn, #:lower16:__stack_chk_guard
1229 movt Rn, #:upper16:__stack_chk_guard
1230
   On ARMv5t, it is,

   ldr	Rn, .Label
   ....
   .Label:
   .word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of a stack protector sequence.  Here we
   choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */
1242
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  ADDR receives the
     address of the __stack_chk_guard variable; BASEREG the register
     it was loaded into; OFFSET the byte size of the load sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  /* Any mismatch below means this is not the stack-protector
     sequence; return the original PC untouched.  */
  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1314
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* Also step over a stack-protector sequence that may follow the
	 prologue proper.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
1407
1408 /* *INDENT-OFF* */
1409 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1410 This function decodes a Thumb function prologue to determine:
1411 1) the size of the stack frame
1412 2) which registers are saved on it
1413 3) the offsets of saved regs
1414 4) the offset from the stack pointer to the frame pointer
1415
1416 A typical Thumb function prologue would create this stack frame
1417 (offsets relative to FP)
1418 old SP -> 24 stack parameters
1419 20 LR
1420 16 R7
1421 R7 -> 0 local variables (16 bytes)
1422 SP -> -12 additional stack space (12 bytes)
1423 The frame size would thus be 36 bytes, and the frame offset would be
1424 12 bytes. The frame register is R7.
1425
   The comments for thumb_analyze_prologue() describe the algorithm we
   use to detect the end of the prologue.  */
1428 /* *INDENT-ON* */
1429
1430 static void
1431 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1432 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1433 {
1434 CORE_ADDR prologue_start;
1435 CORE_ADDR prologue_end;
1436
1437 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1438 &prologue_end))
1439 {
1440 /* See comment in arm_scan_prologue for an explanation of
1441 this heuristics. */
1442 if (prologue_end > prologue_start + 64)
1443 {
1444 prologue_end = prologue_start + 64;
1445 }
1446 }
1447 else
1448 /* We're in the boondocks: we have no idea where the start of the
1449 function is. */
1450 return;
1451
1452 prologue_end = std::min (prologue_end, prev_pc);
1453
1454 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1455 }
1456
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  /* Instructions with the never (NV, 0b1111) condition field do not
     count as SP restores here.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    return 0;

  /* Any of the following writes the stack pointer.  */
  return ((insn & 0x0df0f000) == 0x0080d000	/* ADD SP (reg or imm).  */
	  || (insn & 0x0df0f000) == 0x0040d000	/* SUB SP (reg or imm).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000	/* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000	/* POP (LDMIA sp!).  */
	  || (insn & 0x0fff0000) == 0x049d0000);	/* POP single reg.  */
}
1480
1481 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1482 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1483 fill it in. Return the first address not recognized as a prologue
1484 instruction.
1485
1486 We recognize all the instructions typically found in ARM prologues,
1487 plus harmless instructions which can be skipped (either for analysis
1488 purposes, or a more restrictive set that can be skipped when finding
1489 the end of the prologue). */
1490
1491 static CORE_ADDR
1492 arm_analyze_prologue (struct gdbarch *gdbarch,
1493 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1494 struct arm_prologue_cache *cache)
1495 {
1496 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1497 int regno;
1498 CORE_ADDR offset, current_pc;
1499 pv_t regs[ARM_FPS_REGNUM];
1500 CORE_ADDR unrecognized_pc = 0;
1501
1502 /* Search the prologue looking for instructions that set up the
1503 frame pointer, adjust the stack pointer, and save registers.
1504
1505 Be careful, however, and if it doesn't look like a prologue,
1506 don't try to scan it. If, for instance, a frameless function
1507 begins with stmfd sp!, then we will tell ourselves there is
1508 a frame, which will confuse stack traceback, as well as "finish"
1509 and other operations that rely on a knowledge of the stack
1510 traceback. */
1511
1512 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1513 regs[regno] = pv_register (regno, 0);
1514 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1515
1516 for (current_pc = prologue_start;
1517 current_pc < prologue_end;
1518 current_pc += 4)
1519 {
1520 unsigned int insn
1521 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1522
1523 if (insn == 0xe1a0c00d) /* mov ip, sp */
1524 {
1525 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1526 continue;
1527 }
1528 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1529 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1530 {
1531 unsigned imm = insn & 0xff; /* immediate value */
1532 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1533 int rd = bits (insn, 12, 15);
1534 imm = (imm >> rot) | (imm << (32 - rot));
1535 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1536 continue;
1537 }
1538 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1539 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1540 {
1541 unsigned imm = insn & 0xff; /* immediate value */
1542 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1543 int rd = bits (insn, 12, 15);
1544 imm = (imm >> rot) | (imm << (32 - rot));
1545 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1546 continue;
1547 }
1548 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1549 [sp, #-4]! */
1550 {
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1552 break;
1553 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1554 stack.store (regs[ARM_SP_REGNUM], 4,
1555 regs[bits (insn, 12, 15)]);
1556 continue;
1557 }
1558 else if ((insn & 0xffff0000) == 0xe92d0000)
1559 /* stmfd sp!, {..., fp, ip, lr, pc}
1560 or
1561 stmfd sp!, {a1, a2, a3, a4} */
1562 {
1563 int mask = insn & 0xffff;
1564
1565 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1566 break;
1567
1568 /* Calculate offsets of saved registers. */
1569 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1570 if (mask & (1 << regno))
1571 {
1572 regs[ARM_SP_REGNUM]
1573 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1574 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1575 }
1576 }
1577 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1578 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1579 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1580 {
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1582 continue;
1583 }
1584 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1585 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1586 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1587 {
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1589 continue;
1590 }
1591 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1592 { registers } */
1593 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1594 {
1595 /* No need to add this to saved_regs -- it's just arg regs. */
1596 continue;
1597 }
1598 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1599 {
1600 unsigned imm = insn & 0xff; /* immediate value */
1601 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1602 imm = (imm >> rot) | (imm << (32 - rot));
1603 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1604 }
1605 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1606 {
1607 unsigned imm = insn & 0xff; /* immediate value */
1608 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1609 imm = (imm >> rot) | (imm << (32 - rot));
1610 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1611 }
1612 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1613 [sp, -#c]! */
1614 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1615 {
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1617 break;
1618
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1620 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1621 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1622 }
1623 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1624 [sp!] */
1625 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1626 {
1627 int n_saved_fp_regs;
1628 unsigned int fp_start_reg, fp_bound_reg;
1629
1630 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1631 break;
1632
1633 if ((insn & 0x800) == 0x800) /* N0 is set */
1634 {
1635 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs = 3;
1637 else
1638 n_saved_fp_regs = 1;
1639 }
1640 else
1641 {
1642 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs = 2;
1644 else
1645 n_saved_fp_regs = 4;
1646 }
1647
1648 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1649 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1650 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1651 {
1652 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1653 stack.store (regs[ARM_SP_REGNUM], 12,
1654 regs[fp_start_reg++]);
1655 }
1656 }
1657 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1658 {
1659 /* Allow some special function calls when skipping the
1660 prologue; GCC generates these before storing arguments to
1661 the stack. */
1662 CORE_ADDR dest = BranchDest (current_pc, insn);
1663
1664 if (skip_prologue_function (gdbarch, dest, 0))
1665 continue;
1666 else
1667 break;
1668 }
1669 else if ((insn & 0xf0000000) != 0xe0000000)
1670 break; /* Condition not true, exit early. */
1671 else if (arm_instruction_changes_pc (insn))
1672 /* Don't scan past anything that might change control flow. */
1673 break;
1674 else if (arm_instruction_restores_sp (insn))
1675 {
1676 /* Don't scan past the epilogue. */
1677 break;
1678 }
1679 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Ignore block loads from the stack, potentially copying
1682 parameters from memory. */
1683 continue;
1684 else if ((insn & 0xfc500000) == 0xe4100000
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1686 /* Similarly ignore single loads from the stack. */
1687 continue;
1688 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1689 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1690 register instead of the stack. */
1691 continue;
1692 else
1693 {
1694 /* The optimizer might shove anything into the prologue, if
1695 we build up cache (cache != NULL) from scanning prologue,
1696 we just skip what we don't recognize and scan further to
1697 make cache as complete as possible. However, if we skip
1698 prologue, we'll stop immediately on unrecognized
1699 instruction. */
1700 unrecognized_pc = current_pc;
1701 if (cache != NULL)
1702 continue;
1703 else
1704 break;
1705 }
1706 }
1707
1708 if (unrecognized_pc == 0)
1709 unrecognized_pc = current_pc;
1710
1711 if (cache)
1712 {
1713 int framereg, framesize;
1714
1715 /* The frame size is just the distance from the frame register
1716 to the original stack pointer. */
1717 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1718 {
1719 /* Frame pointer is fp. */
1720 framereg = ARM_FP_REGNUM;
1721 framesize = -regs[ARM_FP_REGNUM].k;
1722 }
1723 else
1724 {
1725 /* Try the stack pointer... this is a bit desperate. */
1726 framereg = ARM_SP_REGNUM;
1727 framesize = -regs[ARM_SP_REGNUM].k;
1728 }
1729
1730 cache->framereg = framereg;
1731 cache->framesize = framesize;
1732
1733 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1734 if (stack.find_reg (gdbarch, regno, &offset))
1735 cache->saved_regs[regno].addr = offset;
1736 }
1737
1738 if (arm_debug)
1739 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1740 paddress (gdbarch, unrecognized_pc));
1741
1742 return unrecognized_pc;
1743 }
1744
/* Scan the prologue of the function containing THIS_FRAME's PC, and
   fill in CACHE with the frame register, frame size, and the stack
   offsets of saved registers.  Dispatches to the Thumb scanner for
   Thumb frames.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
	return;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the frame's current PC.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1839
1840 static struct arm_prologue_cache *
1841 arm_make_prologue_cache (struct frame_info *this_frame)
1842 {
1843 int reg;
1844 struct arm_prologue_cache *cache;
1845 CORE_ADDR unwound_fp;
1846
1847 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1848 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1849
1850 arm_scan_prologue (this_frame, cache);
1851
1852 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1853 if (unwound_fp == 0)
1854 return cache;
1855
1856 cache->prev_sp = unwound_fp + cache->framesize;
1857
1858 /* Calculate actual addresses of saved registers using offsets
1859 determined by arm_scan_prologue. */
1860 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1861 if (trad_frame_addr_p (cache->saved_regs, reg))
1862 cache->saved_regs[reg].addr += cache->prev_sp;
1863
1864 return cache;
1865 }
1866
1867 /* Implementation of the stop_reason hook for arm_prologue frames. */
1868
1869 static enum unwind_stop_reason
1870 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1871 void **this_cache)
1872 {
1873 struct arm_prologue_cache *cache;
1874 CORE_ADDR pc;
1875
1876 if (*this_cache == NULL)
1877 *this_cache = arm_make_prologue_cache (this_frame);
1878 cache = (struct arm_prologue_cache *) *this_cache;
1879
1880 /* This is meant to halt the backtrace at "_start". */
1881 pc = get_frame_pc (this_frame);
1882 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1883 return UNWIND_OUTERMOST;
1884
1885 /* If we've hit a wall, stop. */
1886 if (cache->prev_sp == 0)
1887 return UNWIND_OUTERMOST;
1888
1889 return UNWIND_NO_REASON;
1890 }
1891
1892 /* Our frame ID for a normal frame is the current function's starting PC
1893 and the caller's SP when we were called. */
1894
1895 static void
1896 arm_prologue_this_id (struct frame_info *this_frame,
1897 void **this_cache,
1898 struct frame_id *this_id)
1899 {
1900 struct arm_prologue_cache *cache;
1901 struct frame_id id;
1902 CORE_ADDR pc, func;
1903
1904 if (*this_cache == NULL)
1905 *this_cache = arm_make_prologue_cache (this_frame);
1906 cache = (struct arm_prologue_cache *) *this_cache;
1907
1908 /* Use function start address as part of the frame ID. If we cannot
1909 identify the start address (due to missing symbol information),
1910 fall back to just using the current PC. */
1911 pc = get_frame_pc (this_frame);
1912 func = get_frame_func (this_frame);
1913 if (!func)
1914 func = pc;
1915
1916 id = frame_id_build (cache->prev_sp, func);
1917 *this_id = id;
1918 }
1919
1920 static struct value *
1921 arm_prologue_prev_register (struct frame_info *this_frame,
1922 void **this_cache,
1923 int prev_regnum)
1924 {
1925 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1926 struct arm_prologue_cache *cache;
1927
1928 if (*this_cache == NULL)
1929 *this_cache = arm_make_prologue_cache (this_frame);
1930 cache = (struct arm_prologue_cache *) *this_cache;
1931
1932 /* If we are asked to unwind the PC, then we need to return the LR
1933 instead. The prologue may save PC, but it will point into this
1934 frame's prologue, not the next frame's resume location. Also
1935 strip the saved T bit. A valid LR may have the low bit set, but
1936 a valid PC never does. */
1937 if (prev_regnum == ARM_PC_REGNUM)
1938 {
1939 CORE_ADDR lr;
1940
1941 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1942 return frame_unwind_got_constant (this_frame, prev_regnum,
1943 arm_addr_bits_remove (gdbarch, lr));
1944 }
1945
1946 /* SP is generally not saved to the stack, but this frame is
1947 identified by the next frame's stack pointer at the time of the call.
1948 The value was already reconstructed into PREV_SP. */
1949 if (prev_regnum == ARM_SP_REGNUM)
1950 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1951
1952 /* The CPSR may have been changed by the call instruction and by the
1953 called function. The only bit we can reconstruct is the T bit,
1954 by checking the low bit of LR as of the call. This is a reliable
1955 indicator of Thumb-ness except for some ARM v4T pre-interworking
1956 Thumb code, which could get away with a clear low bit as long as
1957 the called function did not use bx. Guess that all other
1958 bits are unchanged; the condition flags are presumably lost,
1959 but the processor status is likely valid. */
1960 if (prev_regnum == ARM_PS_REGNUM)
1961 {
1962 CORE_ADDR lr, cpsr;
1963 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1964
1965 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1966 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1967 if (IS_THUMB_ADDR (lr))
1968 cpsr |= t_bit;
1969 else
1970 cpsr &= ~t_bit;
1971 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1972 }
1973
1974 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1975 prev_regnum);
1976 }
1977
/* Unwinder for normal ARM frames, based on prologue analysis.  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,				/* unwind_data */
  default_frame_sniffer
};
1986
1987 /* Maintain a list of ARM exception table entries per objfile, similar to the
1988 list of mapping symbols. We only cache entries for standard ARM-defined
1989 personality routines; the cache will contain only the frame unwinding
1990 instructions associated with the entry (not the descriptors). */
1991
/* Per-objfile key under which the exception table cache is stored.  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception table entry.  */
struct arm_exidx_entry
{
  bfd_vma addr;		/* Function start address, as a section offset.  */
  gdb_byte *entry;	/* Normalized unwind instructions, or NULL if the
			   index marked this range EXIDX_CANTUNWIND.  */
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* The cached data: one vector of entries per BFD section, indexed by
   the section's index and ordered by increasing address.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2006
2007 static void
2008 arm_exidx_data_free (struct objfile *objfile, void *arg)
2009 {
2010 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2011 unsigned int i;
2012
2013 for (i = 0; i < objfile->obfd->section_count; i++)
2014 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2015 }
2016
2017 static inline int
2018 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2019 const struct arm_exidx_entry *rhs)
2020 {
2021 return lhs->addr < rhs->addr;
2022 }
2023
2024 static struct obj_section *
2025 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2026 {
2027 struct obj_section *osect;
2028
2029 ALL_OBJFILE_OSECTIONS (objfile, osect)
2030 if (bfd_get_section_flags (objfile->obfd,
2031 osect->the_bfd_section) & SEC_ALLOC)
2032 {
2033 bfd_vma start, size;
2034 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2035 size = bfd_get_section_size (osect->the_bfd_section);
2036
2037 if (start <= vma && vma < start + size)
2038 return osect;
2039 }
2040
2041 return NULL;
2042 }
2043
2044 /* Parse contents of exception table and exception index sections
2045 of OBJFILE, and fill in the exception table entry cache.
2046
2047 For each entry that refers to a standard ARM-defined personality
2048 routine, extract the frame unwinding instructions (from either
2049 the index or the table section). The unwinding instructions
2050 are normalized by:
2051 - extracting them from the rest of the table data
2052 - converting to host endianness
2053 - appending the implicit 0xb0 ("Finish") code
2054
2055 The extracted and normalized instructions are stored for later
2056 retrieval by the arm_find_exidx_entry routine. */
2057
static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_data.resize (bfd_get_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))
	return;
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_data.resize (bfd_get_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))
	return;
    }

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a prel31 offset to the function start, followed by either
     EXIDX_CANTUNWIND, an inline (short form) table entry, or a prel31
     offset into the .ARM.extab section.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  Sign-extend the
	 31-bit (prel31) offset, then relocate it relative to the
	 offset's own position in the index section.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  The second word is
	     again a sign-extended prel31 offset relative to its own
	     position.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_data.size ())
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   (extab_data.data ()
					    + addr - extab_vma));
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  Discard the instructions if the word
	 run would fall outside the bounds of the extab section.  */
      if (n_words)
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the leading bytes out of WORD, most significant
	     valid byte first.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Then copy the trailing words, converting each to
	     big-endian byte order (host-independent form).  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }
}
2258
2259 /* Search for the exception table entry covering MEMADDR. If one is found,
2260 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2261 set *START to the start of the region covered by this entry. */
2262
static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Entries are keyed by section offset, so convert MEMADDR.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
2316
2317 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2318 instruction list from the ARM exception table entry ENTRY, allocate and
2319 return a prologue cache structure describing how to unwind this frame.
2320
2321 Return NULL if the unwinding instruction list contains a "spare",
2322 "reserved" or "refuse to unwind" instruction as defined in section
2323 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2324 for the ARM Architecture" document. */
2325
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  CORE_ADDR vsp = 0;		/* Virtual stack pointer being unwound.  */
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* "Finish": end of the instruction list.  We could only have
	     updated PC by popping into it; if so, it will show up as
	     address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* Large stack adjustment: vsp += 0x204 + (ULEB128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2615
2616 /* Unwinding via ARM exception table entries. Note that the sniffer
2617 already computes a filled-in prologue cache, which is then used
2618 with the same arm_prologue_this_id and arm_prologue_prev_register
2619 routines also used for prologue-parsing based unwinding. */
2620
static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  Detect
	 this by checking whether the instruction preceding PC is an
	 'svc'.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2710
/* Unwinder based on ARM exception table entries.  Note that the
   sniffer already fills in the prologue cache, which is then consumed
   by the same this_id/prev_register hooks as the prologue unwinder.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2719
2720 static struct arm_prologue_cache *
2721 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2722 {
2723 struct arm_prologue_cache *cache;
2724 int reg;
2725
2726 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2727 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2728
2729 /* Still rely on the offset calculated from prologue. */
2730 arm_scan_prologue (this_frame, cache);
2731
2732 /* Since we are in epilogue, the SP has been restored. */
2733 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2734
2735 /* Calculate actual addresses of saved registers using offsets
2736 determined by arm_scan_prologue. */
2737 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2738 if (trad_frame_addr_p (cache->saved_regs, reg))
2739 cache->saved_regs[reg].addr += cache->prev_sp;
2740
2741 return cache;
2742 }
2743
/* Implementation of the 'this_id' function hook in
   'struct frame_unwind' for the epilogue unwinder.  */
2746
2747 static void
2748 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2749 void **this_cache,
2750 struct frame_id *this_id)
2751 {
2752 struct arm_prologue_cache *cache;
2753 CORE_ADDR pc, func;
2754
2755 if (*this_cache == NULL)
2756 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2757 cache = (struct arm_prologue_cache *) *this_cache;
2758
2759 /* Use function start address as part of the frame ID. If we cannot
2760 identify the start address (due to missing symbol information),
2761 fall back to just using the current PC. */
2762 pc = get_frame_pc (this_frame);
2763 func = get_frame_func (this_frame);
2764 if (func == 0)
2765 func = pc;
2766
2767 (*this_id) = frame_id_build (cache->prev_sp, pc);
2768 }
2769
/* Implementation of the 'prev_register' function hook in
   'struct frame_unwind' for the epilogue unwinder.  */
2772
2773 static struct value *
2774 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2775 void **this_cache, int regnum)
2776 {
2777 if (*this_cache == NULL)
2778 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2779
2780 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2781 }
2782
2783 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2784 CORE_ADDR pc);
2785 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2786 CORE_ADDR pc);
2787
/* Implementation of the 'sniffer' function hook in
   'struct frame_unwind' for the epilogue unwinder.  */
2790
2791 static int
2792 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2793 struct frame_info *this_frame,
2794 void **this_prologue_cache)
2795 {
2796 if (frame_relative_level (this_frame) == 0)
2797 {
2798 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2799 CORE_ADDR pc = get_frame_pc (this_frame);
2800
2801 if (arm_frame_is_thumb (this_frame))
2802 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2803 else
2804 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2805 }
2806 else
2807 return 0;
2808 }
2809
/* Unwinder for frames stopped in a function epilogue, where the
   prologue-based unwinder's assumptions no longer hold.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2821
2822 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2823 trampoline, return the target PC. Otherwise return 0.
2824
2825 void call0a (char c, short s, int i, long l) {}
2826
2827 int main (void)
2828 {
2829 (*pointer_to_call0a) (c, s, i, l);
2830 }
2831
2832 Instead of calling a stub library function _call_via_xx (xx is
2833 the register name), GCC may inline the trampoline in the object
2834 file as below (register r2 has the address of call0a).
2835
2836 .global main
2837 .type main, %function
2838 ...
2839 bl .L1
2840 ...
2841 .size main, .-main
2842
2843 .L1:
2844 bx r2
2845
2846 The trampoline 'bx r2' doesn't belong to main. */
2847
2848 static CORE_ADDR
2849 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2850 {
2851 /* The heuristics of recognizing such trampoline is that FRAME is
2852 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2853 if (arm_frame_is_thumb (frame))
2854 {
2855 gdb_byte buf[2];
2856
2857 if (target_read_memory (pc, buf, 2) == 0)
2858 {
2859 struct gdbarch *gdbarch = get_frame_arch (frame);
2860 enum bfd_endian byte_order_for_code
2861 = gdbarch_byte_order_for_code (gdbarch);
2862 uint16_t insn
2863 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2864
2865 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2866 {
2867 CORE_ADDR dest
2868 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2869
2870 /* Clear the LSB so that gdb core sets step-resume
2871 breakpoint at the right address. */
2872 return UNMAKE_THUMB_ADDR (dest);
2873 }
2874 }
2875 }
2876
2877 return 0;
2878 }
2879
2880 static struct arm_prologue_cache *
2881 arm_make_stub_cache (struct frame_info *this_frame)
2882 {
2883 struct arm_prologue_cache *cache;
2884
2885 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2886 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2887
2888 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2889
2890 return cache;
2891 }
2892
/* Our frame ID for a stub frame is the current SP and PC.  */
2894
2895 static void
2896 arm_stub_this_id (struct frame_info *this_frame,
2897 void **this_cache,
2898 struct frame_id *this_id)
2899 {
2900 struct arm_prologue_cache *cache;
2901
2902 if (*this_cache == NULL)
2903 *this_cache = arm_make_stub_cache (this_frame);
2904 cache = (struct arm_prologue_cache *) *this_cache;
2905
2906 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2907 }
2908
2909 static int
2910 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2911 struct frame_info *this_frame,
2912 void **this_prologue_cache)
2913 {
2914 CORE_ADDR addr_in_block;
2915 gdb_byte dummy[4];
2916 CORE_ADDR pc, start_addr;
2917 const char *name;
2918
2919 addr_in_block = get_frame_address_in_block (this_frame);
2920 pc = get_frame_pc (this_frame);
2921 if (in_plt_section (addr_in_block)
2922 /* We also use the stub winder if the target memory is unreadable
2923 to avoid having the prologue unwinder trying to read it. */
2924 || target_read_memory (pc, dummy, 4) != 0)
2925 return 1;
2926
2927 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2928 && arm_skip_bx_reg (this_frame, pc) != 0)
2929 return 1;
2930
2931 return 0;
2932 }
2933
/* Unwinder for stub frames (PLT entries, trampolines, unreadable
   code), where the frame is assumed to save nothing.  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2942
/* Allocate and fill in a prologue cache describing where an
   M-profile exception entry saved the registers of the frame
   described by THIS_FRAME.  The filled-in cache is returned.  */
2946
static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;
  cache->saved_regs[1].addr = unwound_sp + 4;
  cache->saved_regs[2].addr = unwound_sp + 8;
  cache->saved_regs[3].addr = unwound_sp + 12;
  cache->saved_regs[12].addr = unwound_sp + 16;
  cache->saved_regs[14].addr = unwound_sp + 20;
  cache->saved_regs[15].addr = unwound_sp + 24;
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
2985
/* Implementation of the 'this_id' function hook in
   'struct frame_unwind' for the M-profile exception unwinder.  */
2988
2989 static void
2990 arm_m_exception_this_id (struct frame_info *this_frame,
2991 void **this_cache,
2992 struct frame_id *this_id)
2993 {
2994 struct arm_prologue_cache *cache;
2995
2996 if (*this_cache == NULL)
2997 *this_cache = arm_m_exception_cache (this_frame);
2998 cache = (struct arm_prologue_cache *) *this_cache;
2999
3000 /* Our frame ID for a stub frame is the current SP and LR. */
3001 *this_id = frame_id_build (cache->prev_sp,
3002 get_frame_pc (this_frame));
3003 }
3004
/* Implementation of the 'prev_register' function hook in
   'struct frame_unwind' for the M-profile exception unwinder.  */
3007
3008 static struct value *
3009 arm_m_exception_prev_register (struct frame_info *this_frame,
3010 void **this_cache,
3011 int prev_regnum)
3012 {
3013 struct arm_prologue_cache *cache;
3014
3015 if (*this_cache == NULL)
3016 *this_cache = arm_m_exception_cache (this_frame);
3017 cache = (struct arm_prologue_cache *) *this_cache;
3018
3019 /* The value was already reconstructed into PREV_SP. */
3020 if (prev_regnum == ARM_SP_REGNUM)
3021 return frame_unwind_got_constant (this_frame, prev_regnum,
3022 cache->prev_sp);
3023
3024 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3025 prev_regnum);
3026 }
3027
/* Implementation of the 'sniffer' function hook in
   'struct frame_unwind' for the M-profile exception unwinder.  */
3030
static int
arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
				struct frame_info *this_frame,
				void **this_prologue_cache)
{
  /* No need to check is_m here; this sniffer is only registered for
     M-profile architectures.  Claim the frame whenever it returns to
     one of the magic exception-return PC values.  */
  return arm_m_addr_is_magic (get_frame_pc (this_frame));
}
3044
/* Frame unwinder for M-profile exceptions.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,	/* unwind_data -- none needed; the cache is built on demand.  */
  arm_m_exception_unwind_sniffer
};
3056
/* Frame base handler for normal (prologue-analyzed) frames: the frame
   base is the previous frame's stack pointer minus this frame's size,
   both taken from the prologue cache.  */

static CORE_ADDR
arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  return cache->prev_sp - cache->framesize;
}
3068
/* Frame base for frames handled by the prologue unwinder.  The same
   address serves as the frame base, locals base and arguments base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3075
/* Compute the value of PC or PS (CPSR) for THIS_FRAME when unwinding
   via DWARF CFI; installed as a DWARF2_FRAME_REG_FN handler by
   arm_dwarf2_frame_init_reg.  Any other REGNUM is an internal error.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3110
/* Initialize the DWARF frame unwind rules for REG: PC and PS need the
   special reconstruction done by arm_dwarf2_prev_register, and SP is
   given by the CFA.  All other registers keep the default rule.  */

static void
arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
			   struct dwarf2_frame_state_reg *reg,
			   struct frame_info *this_frame)
{
  switch (regnum)
    {
    case ARM_PC_REGNUM:
    case ARM_PS_REGNUM:
      reg->how = DWARF2_FRAME_REG_FN;
      reg->loc.fn = arm_dwarf2_prev_register;
      break;
    case ARM_SP_REGNUM:
      reg->how = DWARF2_FRAME_REG_CFA;
      break;
    }
}
3128
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb code:
   return non-zero if PC appears to be within the function epilogue.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit instruction.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN covers the 32-bit case; INSN2 alone is the preceding 16-bit
     instruction when the previous instruction was not 32-bit.  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3238
/* Helper for arm_stack_frame_destroyed_p: epilogue detection for ARM
   (non-Thumb) mode.  Return non-zero if PC looks like it is inside a
   function epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	  && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3287
3288 /* Implement the stack_frame_destroyed_p gdbarch method. */
3289
3290 static int
3291 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3292 {
3293 if (arm_pc_is_thumb (gdbarch, pc))
3294 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3295 else
3296 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3297 }
3298
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next item down the stack, or NULL.  */
  gdb_byte *data;		/* Heap-allocated copy of the contents.  */
};
3308
3309 static struct stack_item *
3310 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3311 {
3312 struct stack_item *si;
3313 si = XNEW (struct stack_item);
3314 si->data = (gdb_byte *) xmalloc (len);
3315 si->len = len;
3316 si->prev = prev;
3317 memcpy (si->data, contents, len);
3318 return si;
3319 }
3320
3321 static struct stack_item *
3322 pop_stack_item (struct stack_item *si)
3323 {
3324 struct stack_item *dead = si;
3325 si = si->prev;
3326 xfree (dead->data);
3327 xfree (dead);
3328 return si;
3329 }
3330
3331 /* Implement the gdbarch type alignment method, overrides the generic
3332 alignment algorithm for anything that is arm specific. */
3333
3334 static ULONGEST
3335 arm_type_align (gdbarch *gdbarch, struct type *t)
3336 {
3337 t = check_typedef (t);
3338 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3339 {
3340 /* Use the natural alignment for vector types (the same for
3341 scalar type), but the maximum alignment is 64-bit. */
3342 if (TYPE_LENGTH (t) > 8)
3343 return 8;
3344 else
3345 return TYPE_LENGTH (t);
3346 }
3347
3348 /* Allow the common code to calculate the alignment. */
3349 return 0;
3350 }
3351
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Not (yet) classified.  */
  VFP_CPRC_SINGLE,	/* Single-precision float, 4 bytes.  */
  VFP_CPRC_DOUBLE,	/* Double-precision float, 8 bytes.  */
  VFP_CPRC_VEC64,	/* 64-bit containerized vector.  */
  VFP_CPRC_VEC128	/* 128-bit containerized vector.  */
};
3363
3364 /* The length of one element of base type B. */
3365
3366 static unsigned
3367 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3368 {
3369 switch (b)
3370 {
3371 case VFP_CPRC_SINGLE:
3372 return 4;
3373 case VFP_CPRC_DOUBLE:
3374 return 8;
3375 case VFP_CPRC_VEC64:
3376 return 8;
3377 case VFP_CPRC_VEC128:
3378 return 16;
3379 default:
3380 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3381 (int) b);
3382 }
3383 }
3384
3385 /* The character ('s', 'd' or 'q') for the type of VFP register used
3386 for passing base type B. */
3387
3388 static int
3389 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3390 {
3391 switch (b)
3392 {
3393 case VFP_CPRC_SINGLE:
3394 return 's';
3395 case VFP_CPRC_DOUBLE:
3396 return 'd';
3397 case VFP_CPRC_VEC64:
3398 return 'd';
3399 case VFP_CPRC_VEC128:
3400 return 'q';
3401 default:
3402 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3403 (int) b);
3404 }
3405 }
3406
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array counts as many elements as its length
	       divided by the element length; the element type must
	       itself classify consistently.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum of the counts of its
	   non-static fields; padding makes it a non-candidate.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union counts as many elements as its largest member.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3585
3586 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3587 if passed to or returned from a non-variadic function with the VFP
3588 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3589 *BASE_TYPE to the base type for T and *COUNT to the number of
3590 elements of that base type before returning. */
3591
3592 static int
3593 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3594 int *count)
3595 {
3596 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3597 int c = arm_vfp_cprc_sub_candidate (t, &b);
3598 if (c <= 0 || c > 4)
3599 return 0;
3600 *base_type = b;
3601 *count = c;
3602 return 1;
3603 }
3604
3605 /* Return 1 if the VFP ABI should be used for passing arguments to and
3606 returning values from a function of type FUNC_TYPE, 0
3607 otherwise. */
3608
3609 static int
3610 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3611 {
3612 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3613 /* Variadic functions always use the base ABI. Assume that functions
3614 without debug info are not variadic. */
3615 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3616 return 0;
3617 /* The VFP ABI is only supported as a variant of AAPCS. */
3618 if (tdep->arm_abi != ARM_ABI_AAPCS)
3619 return 0;
3620 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3621 }
3622
3623 /* We currently only support passing parameters in integer registers, which
3624 conforms with GCC's default model, and VFP argument passing following
3625 the VFP variant of AAPCS. Several other variants exist and
3626 we should probably support some of them based on the selected ABI. */
3627
3628 static CORE_ADDR
3629 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3630 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3631 struct value **args, CORE_ADDR sp,
3632 function_call_return_method return_method,
3633 CORE_ADDR struct_addr)
3634 {
3635 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3636 int argnum;
3637 int argreg;
3638 int nstack;
3639 struct stack_item *si = NULL;
3640 int use_vfp_abi;
3641 struct type *ftype;
3642 unsigned vfp_regs_free = (1 << 16) - 1;
3643
3644 /* Determine the type of this function and whether the VFP ABI
3645 applies. */
3646 ftype = check_typedef (value_type (function));
3647 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3648 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3649 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3650
3651 /* Set the return address. For the ARM, the return breakpoint is
3652 always at BP_ADDR. */
3653 if (arm_pc_is_thumb (gdbarch, bp_addr))
3654 bp_addr |= 1;
3655 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3656
3657 /* Walk through the list of args and determine how large a temporary
3658 stack is required. Need to take care here as structs may be
3659 passed on the stack, and we have to push them. */
3660 nstack = 0;
3661
3662 argreg = ARM_A1_REGNUM;
3663 nstack = 0;
3664
3665 /* The struct_return pointer occupies the first parameter
3666 passing register. */
3667 if (return_method == return_method_struct)
3668 {
3669 if (arm_debug)
3670 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3671 gdbarch_register_name (gdbarch, argreg),
3672 paddress (gdbarch, struct_addr));
3673 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3674 argreg++;
3675 }
3676
3677 for (argnum = 0; argnum < nargs; argnum++)
3678 {
3679 int len;
3680 struct type *arg_type;
3681 struct type *target_type;
3682 enum type_code typecode;
3683 const bfd_byte *val;
3684 int align;
3685 enum arm_vfp_cprc_base_type vfp_base_type;
3686 int vfp_base_count;
3687 int may_use_core_reg = 1;
3688
3689 arg_type = check_typedef (value_type (args[argnum]));
3690 len = TYPE_LENGTH (arg_type);
3691 target_type = TYPE_TARGET_TYPE (arg_type);
3692 typecode = TYPE_CODE (arg_type);
3693 val = value_contents (args[argnum]);
3694
3695 align = type_align (arg_type);
3696 /* Round alignment up to a whole number of words. */
3697 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3698 /* Different ABIs have different maximum alignments. */
3699 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3700 {
3701 /* The APCS ABI only requires word alignment. */
3702 align = INT_REGISTER_SIZE;
3703 }
3704 else
3705 {
3706 /* The AAPCS requires at most doubleword alignment. */
3707 if (align > INT_REGISTER_SIZE * 2)
3708 align = INT_REGISTER_SIZE * 2;
3709 }
3710
3711 if (use_vfp_abi
3712 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3713 &vfp_base_count))
3714 {
3715 int regno;
3716 int unit_length;
3717 int shift;
3718 unsigned mask;
3719
3720 /* Because this is a CPRC it cannot go in a core register or
3721 cause a core register to be skipped for alignment.
3722 Either it goes in VFP registers and the rest of this loop
3723 iteration is skipped for this argument, or it goes on the
3724 stack (and the stack alignment code is correct for this
3725 case). */
3726 may_use_core_reg = 0;
3727
3728 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3729 shift = unit_length / 4;
3730 mask = (1 << (shift * vfp_base_count)) - 1;
3731 for (regno = 0; regno < 16; regno += shift)
3732 if (((vfp_regs_free >> regno) & mask) == mask)
3733 break;
3734
3735 if (regno < 16)
3736 {
3737 int reg_char;
3738 int reg_scaled;
3739 int i;
3740
3741 vfp_regs_free &= ~(mask << regno);
3742 reg_scaled = regno / shift;
3743 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3744 for (i = 0; i < vfp_base_count; i++)
3745 {
3746 char name_buf[4];
3747 int regnum;
3748 if (reg_char == 'q')
3749 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3750 val + i * unit_length);
3751 else
3752 {
3753 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3754 reg_char, reg_scaled + i);
3755 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3756 strlen (name_buf));
3757 regcache->cooked_write (regnum, val + i * unit_length);
3758 }
3759 }
3760 continue;
3761 }
3762 else
3763 {
3764 /* This CPRC could not go in VFP registers, so all VFP
3765 registers are now marked as used. */
3766 vfp_regs_free = 0;
3767 }
3768 }
3769
3770 /* Push stack padding for dowubleword alignment. */
3771 if (nstack & (align - 1))
3772 {
3773 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3774 nstack += INT_REGISTER_SIZE;
3775 }
3776
3777 /* Doubleword aligned quantities must go in even register pairs. */
3778 if (may_use_core_reg
3779 && argreg <= ARM_LAST_ARG_REGNUM
3780 && align > INT_REGISTER_SIZE
3781 && argreg & 1)
3782 argreg++;
3783
3784 /* If the argument is a pointer to a function, and it is a
3785 Thumb function, create a LOCAL copy of the value and set
3786 the THUMB bit in it. */
3787 if (TYPE_CODE_PTR == typecode
3788 && target_type != NULL
3789 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3790 {
3791 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3792 if (arm_pc_is_thumb (gdbarch, regval))
3793 {
3794 bfd_byte *copy = (bfd_byte *) alloca (len);
3795 store_unsigned_integer (copy, len, byte_order,
3796 MAKE_THUMB_ADDR (regval));
3797 val = copy;
3798 }
3799 }
3800
3801 /* Copy the argument to general registers or the stack in
3802 register-sized pieces. Large arguments are split between
3803 registers and stack. */
3804 while (len > 0)
3805 {
3806 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3807 CORE_ADDR regval
3808 = extract_unsigned_integer (val, partial_len, byte_order);
3809
3810 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3811 {
3812 /* The argument is being passed in a general purpose
3813 register. */
3814 if (byte_order == BFD_ENDIAN_BIG)
3815 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3816 if (arm_debug)
3817 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3818 argnum,
3819 gdbarch_register_name
3820 (gdbarch, argreg),
3821 phex (regval, INT_REGISTER_SIZE));
3822 regcache_cooked_write_unsigned (regcache, argreg, regval);
3823 argreg++;
3824 }
3825 else
3826 {
3827 gdb_byte buf[INT_REGISTER_SIZE];
3828
3829 memset (buf, 0, sizeof (buf));
3830 store_unsigned_integer (buf, partial_len, byte_order, regval);
3831
3832 /* Push the arguments onto the stack. */
3833 if (arm_debug)
3834 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3835 argnum, nstack);
3836 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3837 nstack += INT_REGISTER_SIZE;
3838 }
3839
3840 len -= partial_len;
3841 val += partial_len;
3842 }
3843 }
3844 /* If we have an odd number of words to push, then decrement the stack
3845 by one word now, so first stack argument will be dword aligned. */
3846 if (nstack & 4)
3847 sp -= 4;
3848
3849 while (si)
3850 {
3851 sp -= si->len;
3852 write_memory (sp, si->data, si->len);
3853 si = pop_stack_item (si);
3854 }
3855
3856 /* Finally, update teh SP register. */
3857 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3858
3859 return sp;
3860 }
3861
3862
3863 /* Always align the frame to an 8-byte boundary. This is required on
3864 some platforms and harmless on the rest. */
3865
3866 static CORE_ADDR
3867 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3868 {
3869 /* Align the stack to eight bytes. */
3870 return sp & ~ (CORE_ADDR) 7;
3871 }
3872
3873 static void
3874 print_fpu_flags (struct ui_file *file, int flags)
3875 {
3876 if (flags & (1 << 0))
3877 fputs_filtered ("IVO ", file);
3878 if (flags & (1 << 1))
3879 fputs_filtered ("DVZ ", file);
3880 if (flags & (1 << 2))
3881 fputs_filtered ("OFL ", file);
3882 if (flags & (1 << 3))
3883 fputs_filtered ("UFL ", file);
3884 if (flags & (1 << 4))
3885 fputs_filtered ("INX ", file);
3886 fputc_filtered ('\n', file);
3887 }
3888
/* Print interesting information about the floating point processor
   (if present) or emulator.  */
static void
arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
		      struct frame_info *frame, const char *args)
{
  unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
  int type;

  /* Bits 24-30 of the FPS register hold the FPU type; bit 31
     distinguishes a hardware FPU from a software emulator.  */
  type = (status >> 24) & 127;
  if (status & (1 << 31))
    fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
  else
    fprintf_filtered (file, _("Software FPU type %d\n"), type);
  /* i18n: [floating point unit] mask */
  fputs_filtered (_("mask: "), file);
  print_fpu_flags (file, status >> 16);
  /* i18n: [floating point unit] flags */
  fputs_filtered (_("flags: "), file);
  print_fpu_flags (file, status);
}
3910
3911 /* Construct the ARM extended floating point type. */
3912 static struct type *
3913 arm_ext_type (struct gdbarch *gdbarch)
3914 {
3915 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3916
3917 if (!tdep->arm_ext_type)
3918 tdep->arm_ext_type
3919 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3920 floatformats_arm_ext);
3921
3922 return tdep->arm_ext_type;
3923 }
3924
3925 static struct type *
3926 arm_neon_double_type (struct gdbarch *gdbarch)
3927 {
3928 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3929
3930 if (tdep->neon_double_type == NULL)
3931 {
3932 struct type *t, *elem;
3933
3934 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3935 TYPE_CODE_UNION);
3936 elem = builtin_type (gdbarch)->builtin_uint8;
3937 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3938 elem = builtin_type (gdbarch)->builtin_uint16;
3939 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3940 elem = builtin_type (gdbarch)->builtin_uint32;
3941 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3942 elem = builtin_type (gdbarch)->builtin_uint64;
3943 append_composite_type_field (t, "u64", elem);
3944 elem = builtin_type (gdbarch)->builtin_float;
3945 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3946 elem = builtin_type (gdbarch)->builtin_double;
3947 append_composite_type_field (t, "f64", elem);
3948
3949 TYPE_VECTOR (t) = 1;
3950 TYPE_NAME (t) = "neon_d";
3951 tdep->neon_double_type = t;
3952 }
3953
3954 return tdep->neon_double_type;
3955 }
3956
3957 /* FIXME: The vector types are not correctly ordered on big-endian
3958 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3959 bits of d0 - regardless of what unit size is being held in d0. So
3960 the offset of the first uint8 in d0 is 7, but the offset of the
3961 first float is 4. This code works as-is for little-endian
3962 targets. */
3963
3964 static struct type *
3965 arm_neon_quad_type (struct gdbarch *gdbarch)
3966 {
3967 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3968
3969 if (tdep->neon_quad_type == NULL)
3970 {
3971 struct type *t, *elem;
3972
3973 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3974 TYPE_CODE_UNION);
3975 elem = builtin_type (gdbarch)->builtin_uint8;
3976 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3977 elem = builtin_type (gdbarch)->builtin_uint16;
3978 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3979 elem = builtin_type (gdbarch)->builtin_uint32;
3980 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3981 elem = builtin_type (gdbarch)->builtin_uint64;
3982 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3983 elem = builtin_type (gdbarch)->builtin_float;
3984 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3985 elem = builtin_type (gdbarch)->builtin_double;
3986 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3987
3988 TYPE_VECTOR (t) = 1;
3989 TYPE_NAME (t) = "neon_q";
3990 tdep->neon_quad_type = t;
3991 }
3992
3993 return tdep->neon_quad_type;
3994 }
3995
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The first 32 pseudo registers after the raw registers are shown as
     single floats -- presumably the S views of the VFP D registers;
     confirm against the pseudo-register setup.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The following 16 pseudo registers are shown with the NEON quad
     union type.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers; shown as void when the target has no FPA.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4045
/* Map a DWARF register number REG onto the appropriate GDB register
   number, or return -1 for an unrecognized DWARF register.  */

static int
arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
{
  /* Core integer regs.  */
  if (reg >= 0 && reg <= 15)
    return reg;

  /* Legacy FPA encoding.  These were once used in a way which
     overlapped with VFP register numbering, so their use is
     discouraged, but GDB doesn't support the ARM toolchain
     which used them for VFP.  */
  if (reg >= 16 && reg <= 23)
    return ARM_F0_REGNUM + reg - 16;

  /* New assignments for the FPA registers.  */
  if (reg >= 96 && reg <= 103)
    return ARM_F0_REGNUM + reg - 96;

  /* WMMX register assignments.  */
  if (reg >= 104 && reg <= 111)
    return ARM_WCGR0_REGNUM + reg - 104;

  if (reg >= 112 && reg <= 127)
    return ARM_WR0_REGNUM + reg - 112;

  if (reg >= 192 && reg <= 199)
    return ARM_WC0_REGNUM + reg - 192;

  /* VFP v2 registers.  A double precision value is actually
     in d1 rather than s2, but the ABI only defines numbering
     for the single precision registers.  This will "just work"
     in GDB for little endian targets (we'll read eight bytes,
     starting in s0 and then progressing to s1), but will be
     reversed on big endian targets with VFP.  This won't
     be a problem for the new Neon quad registers; you're supposed
     to use DW_OP_piece for those.  */
  if (reg >= 64 && reg <= 95)
    {
      char name_buf[4];

      xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  /* VFP v3 / Neon registers.  This range is also used for VFP v2
     registers, except that it now describes d0 instead of s0.  */
  if (reg >= 256 && reg <= 287)
    {
      char name_buf[4];

      xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  /* Not a DWARF register number GDB knows about.  */
  return -1;
}
4107
4108 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4109 static int
4110 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4111 {
4112 int reg = regnum;
4113 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4114
4115 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4116 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4117
4118 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4119 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4120
4121 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4122 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4123
4124 if (reg < NUM_GREGS)
4125 return SIM_ARM_R0_REGNUM + reg;
4126 reg -= NUM_GREGS;
4127
4128 if (reg < NUM_FREGS)
4129 return SIM_ARM_FP0_REGNUM + reg;
4130 reg -= NUM_FREGS;
4131
4132 if (reg < NUM_SREGS)
4133 return SIM_ARM_FPS_REGNUM + reg;
4134 reg -= NUM_SREGS;
4135
4136 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4137 }
4138
4139 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4140 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4141 NULL if an error occurs. BUF is freed. */
4142
4143 static gdb_byte *
4144 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4145 int old_len, int new_len)
4146 {
4147 gdb_byte *new_buf;
4148 int bytes_to_read = new_len - old_len;
4149
4150 new_buf = (gdb_byte *) xmalloc (new_len);
4151 memcpy (new_buf + bytes_to_read, buf, old_len);
4152 xfree (buf);
4153 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4154 {
4155 xfree (new_buf);
4156 return NULL;
4157 }
4158 return new_buf;
4159 }
4160
4161 /* An IT block is at most the 2-byte IT instruction followed by
4162 four 4-byte instructions. The furthest back we must search to
4163 find an IT block that affects the current instruction is thus
4164 2 + 3 * 4 == 14 bytes. */
4165 #define MAX_IT_BLOCK_PREFIX 14
4166
4167 /* Use a quick scan if there are more than this many bytes of
4168 code. */
4169 #define IT_SCAN_THRESHOLD 32
4170
4171 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4172 A breakpoint in an IT block may not be hit, depending on the
4173 condition flags. */
4174 static CORE_ADDR
4175 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4176 {
4177 gdb_byte *buf;
4178 char map_type;
4179 CORE_ADDR boundary, func_start;
4180 int buf_len;
4181 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4182 int i, any, last_it, last_it_count;
4183
4184 /* If we are using BKPT breakpoints, none of this is necessary. */
4185 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4186 return bpaddr;
4187
4188 /* ARM mode does not have this problem. */
4189 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4190 return bpaddr;
4191
4192 /* We are setting a breakpoint in Thumb code that could potentially
4193 contain an IT block. The first step is to find how much Thumb
4194 code there is; we do not need to read outside of known Thumb
4195 sequences. */
4196 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4197 if (map_type == 0)
4198 /* Thumb-2 code must have mapping symbols to have a chance. */
4199 return bpaddr;
4200
4201 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4202
4203 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4204 && func_start > boundary)
4205 boundary = func_start;
4206
4207 /* Search for a candidate IT instruction. We have to do some fancy
4208 footwork to distinguish a real IT instruction from the second
4209 half of a 32-bit instruction, but there is no need for that if
4210 there's no candidate. */
4211 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4212 if (buf_len == 0)
4213 /* No room for an IT instruction. */
4214 return bpaddr;
4215
4216 buf = (gdb_byte *) xmalloc (buf_len);
4217 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4218 return bpaddr;
4219 any = 0;
4220 for (i = 0; i < buf_len; i += 2)
4221 {
4222 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4223 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4224 {
4225 any = 1;
4226 break;
4227 }
4228 }
4229
4230 if (any == 0)
4231 {
4232 xfree (buf);
4233 return bpaddr;
4234 }
4235
4236 /* OK, the code bytes before this instruction contain at least one
4237 halfword which resembles an IT instruction. We know that it's
4238 Thumb code, but there are still two possibilities. Either the
4239 halfword really is an IT instruction, or it is the second half of
4240 a 32-bit Thumb instruction. The only way we can tell is to
4241 scan forwards from a known instruction boundary. */
4242 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4243 {
4244 int definite;
4245
4246 /* There's a lot of code before this instruction. Start with an
4247 optimistic search; it's easy to recognize halfwords that can
4248 not be the start of a 32-bit instruction, and use that to
4249 lock on to the instruction boundaries. */
4250 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4251 if (buf == NULL)
4252 return bpaddr;
4253 buf_len = IT_SCAN_THRESHOLD;
4254
4255 definite = 0;
4256 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4257 {
4258 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4259 if (thumb_insn_size (inst1) == 2)
4260 {
4261 definite = 1;
4262 break;
4263 }
4264 }
4265
4266 /* At this point, if DEFINITE, BUF[I] is the first place we
4267 are sure that we know the instruction boundaries, and it is far
4268 enough from BPADDR that we could not miss an IT instruction
4269 affecting BPADDR. If ! DEFINITE, give up - start from a
4270 known boundary. */
4271 if (! definite)
4272 {
4273 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4274 bpaddr - boundary);
4275 if (buf == NULL)
4276 return bpaddr;
4277 buf_len = bpaddr - boundary;
4278 i = 0;
4279 }
4280 }
4281 else
4282 {
4283 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4284 if (buf == NULL)
4285 return bpaddr;
4286 buf_len = bpaddr - boundary;
4287 i = 0;
4288 }
4289
4290 /* Scan forwards. Find the last IT instruction before BPADDR. */
4291 last_it = -1;
4292 last_it_count = 0;
4293 while (i < buf_len)
4294 {
4295 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4296 last_it_count--;
4297 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4298 {
4299 last_it = i;
4300 if (inst1 & 0x0001)
4301 last_it_count = 4;
4302 else if (inst1 & 0x0002)
4303 last_it_count = 3;
4304 else if (inst1 & 0x0004)
4305 last_it_count = 2;
4306 else
4307 last_it_count = 1;
4308 }
4309 i += thumb_insn_size (inst1);
4310 }
4311
4312 xfree (buf);
4313
4314 if (last_it == -1)
4315 /* There wasn't really an IT instruction after all. */
4316 return bpaddr;
4317
4318 if (last_it_count < 1)
4319 /* It was too far away. */
4320 return bpaddr;
4321
4322 /* This really is a trouble spot. Move the breakpoint to the IT
4323 instruction. */
4324 return bpaddr - buf_len + last_it;
4325 }
4326
4327 /* ARM displaced stepping support.
4328
4329 Generally ARM displaced stepping works as follows:
4330
4331 1. When an instruction is to be single-stepped, it is first decoded by
4332 arm_process_displaced_insn. Depending on the type of instruction, it is
4333 then copied to a scratch location, possibly in a modified form. The
4334 copy_* set of functions performs such modification, as necessary. A
4335 breakpoint is placed after the modified instruction in the scratch space
4336 to return control to GDB. Note in particular that instructions which
4337 modify the PC will no longer do so after modification.
4338
4339 2. The instruction is single-stepped, by setting the PC to the scratch
4340 location address, and resuming. Control returns to GDB when the
4341 breakpoint is hit.
4342
4343 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4344 function used for the current instruction. This function's job is to
4345 put the CPU/memory state back to what it would have been if the
4346 instruction had been executed unmodified in its original location. */
4347
4348 /* NOP instruction (mov r0, r0). */
4349 #define ARM_NOP 0xe1a00000
4350 #define THUMB_NOP 0x4600
4351
4352 /* Helper for register reads for displaced stepping. In particular, this
4353 returns the PC as it would be seen by the instruction at its original
4354 location. */
4355
4356 ULONGEST
4357 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4358 int regno)
4359 {
4360 ULONGEST ret;
4361 CORE_ADDR from = dsc->insn_addr;
4362
4363 if (regno == ARM_PC_REGNUM)
4364 {
4365 /* Compute pipeline offset:
4366 - When executing an ARM instruction, PC reads as the address of the
4367 current instruction plus 8.
4368 - When executing a Thumb instruction, PC reads as the address of the
4369 current instruction plus 4. */
4370
4371 if (!dsc->is_thumb)
4372 from += 8;
4373 else
4374 from += 4;
4375
4376 if (debug_displaced)
4377 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4378 (unsigned long) from);
4379 return (ULONGEST) from;
4380 }
4381 else
4382 {
4383 regcache_cooked_read_unsigned (regs, regno, &ret);
4384 if (debug_displaced)
4385 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4386 regno, (unsigned long) ret);
4387 return ret;
4388 }
4389 }
4390
4391 static int
4392 displaced_in_arm_mode (struct regcache *regs)
4393 {
4394 ULONGEST ps;
4395 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4396
4397 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4398
4399 return (ps & t_bit) == 0;
4400 }
4401
4402 /* Write to the PC as from a branch instruction. */
4403
4404 static void
4405 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4406 ULONGEST val)
4407 {
4408 if (!dsc->is_thumb)
4409 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4410 architecture versions < 6. */
4411 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4412 val & ~(ULONGEST) 0x3);
4413 else
4414 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4415 val & ~(ULONGEST) 0x1);
4416 }
4417
4418 /* Write to the PC as from a branch-exchange instruction. */
4419
4420 static void
4421 bx_write_pc (struct regcache *regs, ULONGEST val)
4422 {
4423 ULONGEST ps;
4424 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4425
4426 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4427
4428 if ((val & 1) == 1)
4429 {
4430 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4431 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4432 }
4433 else if ((val & 2) == 0)
4434 {
4435 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4436 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4437 }
4438 else
4439 {
4440 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4441 mode, align dest to 4 bytes). */
4442 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4443 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4444 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4445 }
4446 }
4447
4448 /* Write to the PC as if from a load instruction. */
4449
4450 static void
4451 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4452 ULONGEST val)
4453 {
4454 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4455 bx_write_pc (regs, val);
4456 else
4457 branch_write_pc (regs, dsc, val);
4458 }
4459
4460 /* Write to the PC as if from an ALU instruction. */
4461
4462 static void
4463 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4464 ULONGEST val)
4465 {
4466 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4467 bx_write_pc (regs, val);
4468 else
4469 branch_write_pc (regs, dsc, val);
4470 }
4471
/* Helper for writing to registers for displaced stepping.  Writing to the PC
   has varying effects depending on the instruction which does the write:
   this is controlled by the WRITE_PC argument.  */

void
displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
		     int regno, ULONGEST val, enum pc_write_style write_pc)
{
  if (regno == ARM_PC_REGNUM)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
			    (unsigned long) val);
      /* Dispatch on the semantics of the writing instruction: branch,
	 branch-exchange, load and ALU writes to the PC each have
	 different interworking/alignment behaviour.  */
      switch (write_pc)
	{
	case BRANCH_WRITE_PC:
	  branch_write_pc (regs, dsc, val);
	  break;

	case BX_WRITE_PC:
	  bx_write_pc (regs, val);
	  break;

	case LOAD_WRITE_PC:
	  load_write_pc (regs, dsc, val);
	  break;

	case ALU_WRITE_PC:
	  alu_write_pc (regs, dsc, val);
	  break;

	case CANNOT_WRITE_PC:
	  warning (_("Instruction wrote to PC in an unexpected way when "
		     "single-stepping"));
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("Invalid argument to displaced_write_reg"));
	}

      /* Record that this instruction wrote to the PC.  */
      dsc->wrote_to_pc = 1;
    }
  else
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
			    regno, (unsigned long) val);
      regcache_cooked_write_unsigned (regs, regno, val);
    }
}
4523
/* Concisely determine whether instruction INSN references the PC.
   BITMASK marks the register fields of interest: each such field has
   its four bits set to 0b1111 in BITMASK.  Return 1 if any marked
   field of INSN contains 0b1111 (r15, the PC), else 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  while (bitmask != 0)
    {
      /* Isolate the lowest set bit of BITMASK and widen it into a
	 four-bit field mask (multiplying by 0xf sets the next three
	 bits up as well).  */
      uint32_t low = bitmask & -bitmask;
      uint32_t field_mask = low * 0xf;

      /* A register field of all ones is r15.  */
      if ((insn & field_mask) == field_mask)
	return 1;

      bitmask &= ~field_mask;
    }

  return 0;
}
4555
/* The simplest copy function.  Many instructions have the same effect no
   matter what address they are executed at: in those cases, use this.

   INSN is copied to the scratch area untouched; INAME names the
   opcode/class for debug output only.  Returns 0 (success).  */

static int
arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
		     const char *iname, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
			"opcode/class '%s' unmodified\n", (unsigned long) insn,
			iname);

  dsc->modinsn[0] = insn;

  return 0;
}
4572
/* Copy a 32-bit Thumb-2 instruction (halfwords INSN1 and INSN2) without
   any modification.  INAME names the opcode/class for debug output
   only.  Returns 0 (success).  */
static int
thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, const char *iname,
			     arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
			"opcode/class '%s' unmodified\n", insn1, insn2,
			iname);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}
4589
/* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any
   modification.  INAME names the opcode/class for debug output only.
   Returns 0 (success).  */
static int
thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
			     const char *iname,
			     arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
			"opcode/class '%s' unmodified\n", insn,
			iname);

  dsc->modinsn[0] = insn;

  return 0;
}
4606
4607 /* Preload instructions with immediate offset. */
4608
4609 static void
4610 cleanup_preload (struct gdbarch *gdbarch,
4611 struct regcache *regs, arm_displaced_step_closure *dsc)
4612 {
4613 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4614 if (!dsc->u.preload.immed)
4615 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4616 }
4617
4618 static void
4619 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4620 arm_displaced_step_closure *dsc, unsigned int rn)
4621 {
4622 ULONGEST rn_val;
4623 /* Preload instructions:
4624
4625 {pli/pld} [rn, #+/-imm]
4626 ->
4627 {pli/pld} [r0, #+/-imm]. */
4628
4629 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4630 rn_val = displaced_read_reg (regs, dsc, rn);
4631 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4632 dsc->u.preload.immed = 1;
4633
4634 dsc->cleanup = &cleanup_preload;
4635 }
4636
/* Copy an ARM {pli/pld} immediate-offset preload for displaced
   stepping.  Only the PC-relative form needs rewriting; see
   install_preload.  */
static int
arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);

  /* 0x000f0000 marks the Rn field (bits 16-19); any other base register
     is position-independent and runs unmodified.  */
  if (!insn_references_pc (insn, 0x000f0000ul))
    return arm_copy_unmodified (gdbarch, insn, "preload", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
			(unsigned long) insn);

  /* Clear the Rn field, i.e. rewrite the base register to r0.  */
  dsc->modinsn[0] = insn & 0xfff0ffff;

  install_preload (gdbarch, regs, dsc, rn);

  return 0;
}
4656
/* Copy a 32-bit Thumb {pli/pld} for displaced stepping.  Only the
   PC-relative (literal) forms need rewriting.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* 1: add IMM12, 0: subtract.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to be used in PLI (immediate, literal)
     Encoding T3, and PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4704
4705 /* Preload instructions with register offset. */
4706
4707 static void
4708 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4709 arm_displaced_step_closure *dsc, unsigned int rn,
4710 unsigned int rm)
4711 {
4712 ULONGEST rn_val, rm_val;
4713
4714 /* Preload register-offset instructions:
4715
4716 {pli/pld} [rn, rm {, shift}]
4717 ->
4718 {pli/pld} [r0, r1 {, shift}]. */
4719
4720 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4721 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4722 rn_val = displaced_read_reg (regs, dsc, rn);
4723 rm_val = displaced_read_reg (regs, dsc, rm);
4724 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4725 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4726 dsc->u.preload.immed = 0;
4727
4728 dsc->cleanup = &cleanup_preload;
4729 }
4730
/* Copy an ARM {pli/pld} register-offset preload for displaced stepping.
   Only forms referencing the PC need rewriting; see
   install_preload_reg.  */
static int
arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);


  /* 0x000f000f marks the Rn (bits 16-19) and Rm (bits 0-3) fields.  */
  if (!insn_references_pc (insn, 0x000f000ful))
    return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
			(unsigned long) insn);

  /* Rewrite Rn to r0 and Rm to r1.  */
  dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;

  install_preload_reg (gdbarch, regs, dsc, rn, rm);
  return 0;
}
4752
4753 /* Copy/cleanup coprocessor load and store instructions. */
4754
4755 static void
4756 cleanup_copro_load_store (struct gdbarch *gdbarch,
4757 struct regcache *regs,
4758 arm_displaced_step_closure *dsc)
4759 {
4760 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4761
4762 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4763
4764 if (dsc->u.ldst.writeback)
4765 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4766 }
4767
4768 static void
4769 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4770 arm_displaced_step_closure *dsc,
4771 int writeback, unsigned int rn)
4772 {
4773 ULONGEST rn_val;
4774
4775 /* Coprocessor load/store instructions:
4776
4777 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4778 ->
4779 {stc/stc2} [r0, #+/-imm].
4780
4781 ldc/ldc2 are handled identically. */
4782
4783 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4784 rn_val = displaced_read_reg (regs, dsc, rn);
4785 /* PC should be 4-byte aligned. */
4786 rn_val = rn_val & 0xfffffffc;
4787 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4788
4789 dsc->u.ldst.writeback = writeback;
4790 dsc->u.ldst.rn = rn;
4791
4792 dsc->cleanup = &cleanup_copro_load_store;
4793 }
4794
/* Copy an ARM coprocessor load/store for displaced stepping.  Only
   PC-relative (literal) forms need rewriting.  */
static int
arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
			   struct regcache *regs,
			   arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);

  if (!insn_references_pc (insn, 0x000f0000ul))
    return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
			"load/store insn %.8lx\n", (unsigned long) insn);

  /* Rewrite the base register Rn (bits 16-19) to r0.  */
  dsc->modinsn[0] = insn & 0xfff0ffff;

  /* NOTE(review): bit 25 is passed as the writeback flag; the LDC/STC W
     bit is usually documented as bit 21 -- confirm against the ARM ARM
     for the encodings that reach this function.  */
  install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);

  return 0;
}
4815
/* Copy a 32-bit Thumb coprocessor load/store with a PC (literal) base
   register for displaced stepping.  */
static int
thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);

  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"copro load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
			"load/store insn %.4x%.4x\n", insn1, insn2);

  /* Rewrite the base register Rn (low nibble of the first halfword)
     to r0.  */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  /* This function is called for copying instruction LDC/LDC2/VLDR, which
     doesn't support writeback, so pass 0.  */
  install_copro_load_store (gdbarch, regs, dsc, 0, rn);

  return 0;
}
4841
4842 /* Clean up branch instructions (actually perform the branch, by setting
4843 PC). */
4844
4845 static void
4846 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4847 arm_displaced_step_closure *dsc)
4848 {
4849 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4850 int branch_taken = condition_true (dsc->u.branch.cond, status);
4851 enum pc_write_style write_pc = dsc->u.branch.exchange
4852 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4853
4854 if (!branch_taken)
4855 return;
4856
4857 if (dsc->u.branch.link)
4858 {
4859 /* The value of LR should be the next insn of current one. In order
4860 not to confuse logic hanlding later insn `bx lr', if current insn mode
4861 is Thumb, the bit 0 of LR value should be set to 1. */
4862 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4863
4864 if (dsc->is_thumb)
4865 next_insn_addr |= 0x1;
4866
4867 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4868 CANNOT_WRITE_PC);
4869 }
4870
4871 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4872 }
4873
4874 /* Copy B/BL/BLX instructions with immediate destinations. */
4875
4876 static void
4877 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4878 arm_displaced_step_closure *dsc,
4879 unsigned int cond, int exchange, int link, long offset)
4880 {
4881 /* Implement "BL<cond> <label>" as:
4882
4883 Preparation: cond <- instruction condition
4884 Insn: mov r0, r0 (nop)
4885 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4886
4887 B<cond> similar, but don't set r14 in cleanup. */
4888
4889 dsc->u.branch.cond = cond;
4890 dsc->u.branch.link = link;
4891 dsc->u.branch.exchange = exchange;
4892
4893 dsc->u.branch.dest = dsc->insn_addr;
4894 if (link && exchange)
4895 /* For BLX, offset is computed from the Align (PC, 4). */
4896 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4897
4898 if (dsc->is_thumb)
4899 dsc->u.branch.dest += 4 + offset;
4900 else
4901 dsc->u.branch.dest += 8 + offset;
4902
4903 dsc->cleanup = &cleanup_branch;
4904 }
/* Copy an ARM B/BL/BLX immediate branch for displaced stepping.  The
   branch itself is deferred to cleanup_branch; a NOP runs in the
   scratch space.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* A 0xf condition field marks the unconditional BLX encoding.  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend: the 24-bit immediate shifted left by 2 occupies bits
     2-25, so bit 25 is its sign bit.  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4933
/* Copy a 32-bit Thumb B/BL/BLX for displaced stepping.  The branch is
   deferred to cleanup_branch via install_b_bl_blx; a Thumb NOP runs in
   the scratch space.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  /* S comes from sbits, so it is 0 or -1; (s << N) below then also
     fills every bit above bit N, performing the sign extension.
     NOTE(review): left-shifting a negative value is formally undefined
     in C -- this relies on the usual arithmetic behaviour.  */
  int s = sbits (insn1, 10, 10);
  /* Thumb-2 branch encoding: I1 = NOT (J1 EOR S), I2 = NOT (J2 EOR S).  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      /* BLX targets are word-aligned, BL targets halfword-aligned.  */
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4988
4989 /* Copy B Thumb instructions. */
4990 static int
4991 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4992 arm_displaced_step_closure *dsc)
4993 {
4994 unsigned int cond = 0;
4995 int offset = 0;
4996 unsigned short bit_12_15 = bits (insn, 12, 15);
4997 CORE_ADDR from = dsc->insn_addr;
4998
4999 if (bit_12_15 == 0xd)
5000 {
5001 /* offset = SignExtend (imm8:0, 32) */
5002 offset = sbits ((insn << 1), 0, 8);
5003 cond = bits (insn, 8, 11);
5004 }
5005 else if (bit_12_15 == 0xe) /* Encoding T2 */
5006 {
5007 offset = sbits ((insn << 1), 0, 11);
5008 cond = INST_AL;
5009 }
5010
5011 if (debug_displaced)
5012 fprintf_unfiltered (gdb_stdlog,
5013 "displaced: copying b immediate insn %.4x "
5014 "with offset %d\n", insn, offset);
5015
5016 dsc->u.branch.cond = cond;
5017 dsc->u.branch.link = 0;
5018 dsc->u.branch.exchange = 0;
5019 dsc->u.branch.dest = from + 4 + offset;
5020
5021 dsc->modinsn[0] = THUMB_NOP;
5022
5023 dsc->cleanup = &cleanup_branch;
5024
5025 return 0;
5026 }
5027
5028 /* Copy BX/BLX with register-specified destinations. */
5029
5030 static void
5031 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5032 arm_displaced_step_closure *dsc, int link,
5033 unsigned int cond, unsigned int rm)
5034 {
5035 /* Implement {BX,BLX}<cond> <reg>" as:
5036
5037 Preparation: cond <- instruction condition
5038 Insn: mov r0, r0 (nop)
5039 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5040
5041 Don't set r14 in cleanup for BX. */
5042
5043 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5044
5045 dsc->u.branch.cond = cond;
5046 dsc->u.branch.link = link;
5047
5048 dsc->u.branch.exchange = 1;
5049
5050 dsc->cleanup = &cleanup_branch;
5051 }
5052
/* Copy an ARM BX/BLX register branch for displaced stepping; the branch
   itself happens in cleanup_branch.  */
static int
arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* BX:  x12xxx1x
     BLX: x12xxx3x.  */
  int link = bit (insn, 5);
  unsigned int rm = bits (insn, 0, 3);

  /* NOTE(review): this debug message lacks the trailing newline that
     the sibling copy functions emit.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
			(unsigned long) insn);

  dsc->modinsn[0] = ARM_NOP;

  install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
  return 0;
}
5072
/* Copy a 16-bit Thumb BX/BLX register branch for displaced stepping;
   the branch itself happens in cleanup_branch.  Thumb BX/BLX are always
   unconditional (outside an IT block), hence INST_AL.  */
static int
thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  int link = bit (insn, 7);
  unsigned int rm = bits (insn, 3, 6);

  /* NOTE(review): this debug message lacks the trailing newline that
     the sibling copy functions emit.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
			(unsigned short) insn);

  dsc->modinsn[0] = THUMB_NOP;

  install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);

  return 0;
}
5091
5092
5093 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5094
5095 static void
5096 cleanup_alu_imm (struct gdbarch *gdbarch,
5097 struct regcache *regs, arm_displaced_step_closure *dsc)
5098 {
5099 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5100 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5101 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5102 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5103 }
5104
/* Copy an ARM data-processing instruction with immediate RHS for
   displaced stepping.  Only forms whose Rd or Rn is the PC need
   rewriting.  */
static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  /* MOV has no first operand, so its Rn field is not remapped below.  */
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* 0x000ff000 marks the Rn (bits 16-19) and Rd (bits 12-15) fields.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clearing bits 12-19 makes both Rd and Rn r0; for non-MOV forms,
     0x10000 then sets Rn to r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5152
/* Copy a 32-bit Thumb-2 ALU-immediate instruction (only MOV reaches this
   routine; see the assertion below) when Rd or Rm is the PC, remapping
   the operands onto r0/r1 so the copy can execute out of line.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3); /* Rn */
  rm = bits (insn2, 0, 3); /* Rm */
  rd = bits (insn2, 8, 11); /* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* If neither Rm nor Rd is the PC, the instruction can execute out of
     line unmodified.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Remap the register fields in the second halfword onto r0/r1, per the
     rewrite description above.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5204
5205 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5206
5207 static void
5208 cleanup_alu_reg (struct gdbarch *gdbarch,
5209 struct regcache *regs, arm_displaced_step_closure *dsc)
5210 {
5211 ULONGEST rd_val;
5212 int i;
5213
5214 rd_val = displaced_read_reg (regs, dsc, 0);
5215
5216 for (i = 0; i < 3; i++)
5217 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5218
5219 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5220 }
5221
/* Common setup for an ALU (register) instruction whose operands have been
   remapped onto r0-r2 by the caller: save r0-r2 into the scratch slots,
   load them with the values of rd/rn/rm, record the real destination and
   register the cleanup routine.  */

static void
install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
		 arm_displaced_step_closure *dsc,
		 unsigned int rd, unsigned int rn, unsigned int rm)
{
  ULONGEST rd_val, rn_val, rm_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm [, <shift>]

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
		  r0, r1, r2 <- rd, rn, rm
     Insn: <op><cond> r0, [r1,] r2 [, <shift>]
     Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
  */

  /* Read all sources before overwriting anything, in case rd/rn/rm
     overlap r0-r2.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  dsc->cleanup = &cleanup_alu_reg;
}
5254
5255 static int
5256 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5257 arm_displaced_step_closure *dsc)
5258 {
5259 unsigned int op = bits (insn, 21, 24);
5260 int is_mov = (op == 0xd);
5261
5262 if (!insn_references_pc (insn, 0x000ff00ful))
5263 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5264
5265 if (debug_displaced)
5266 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5267 is_mov ? "move" : "ALU", (unsigned long) insn);
5268
5269 if (is_mov)
5270 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5271 else
5272 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5273
5274 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5275 bits (insn, 0, 3));
5276 return 0;
5277 }
5278
5279 static int
5280 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5281 struct regcache *regs,
5282 arm_displaced_step_closure *dsc)
5283 {
5284 unsigned rm, rd;
5285
5286 rm = bits (insn, 3, 6);
5287 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5288
5289 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5290 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5291
5292 if (debug_displaced)
5293 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5294 (unsigned short) insn);
5295
5296 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5297
5298 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5299
5300 return 0;
5301 }
5302
5303 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5304
5305 static void
5306 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5307 struct regcache *regs,
5308 arm_displaced_step_closure *dsc)
5309 {
5310 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5311 int i;
5312
5313 for (i = 0; i < 4; i++)
5314 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5315
5316 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5317 }
5318
/* Common setup for an ALU instruction with shifted-register RHS whose
   operands have been remapped onto r0-r3 by the caller: save r0-r3, load
   them with rd/rn/rm/rs, record the destination and register the cleanup
   routine.  */

static void
install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
			 arm_displaced_step_closure *dsc,
			 unsigned int rd, unsigned int rn, unsigned int rm,
			 unsigned rs)
{
  int i;
  ULONGEST rd_val, rn_val, rm_val, rs_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm, <shift> rs

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
		  r0, r1, r2, r3 <- rd, rn, rm, rs
     Insn: <op><cond> r0, r1, r2, <shift> r3
     Cleanup: tmp5 <- r0
	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
	      rd <- tmp5
  */

  /* Save r0-r3, then read all sources before overwriting anything, in
     case rd/rn/rm/rs overlap r0-r3.  */
  for (i = 0; i < 4; i++)
    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  rs_val = displaced_read_reg (regs, dsc, rs);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
  dsc->rd = rd;
  dsc->cleanup = &cleanup_alu_shifted_reg;
}
5356
/* Copy an ARM ALU instruction with shifted-register RHS that mentions the
   PC, remapping its operands onto r0-r3 so the copy can execute out of
   line.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);	/* MOV has no Rn operand.  */
  unsigned int rd, rn, rm, rs;

  /* Mask covers the Rn, Rd, Rs and Rm fields; if none is the PC the
     instruction runs out of line unmodified.  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* 0xfff000f0 clears the Rn/Rd/Rs/Rm fields (keeping the shift type in
     bits 4-7); 0x302 makes Rs = r3 and Rm = r2, and the extra 0x10000
     makes Rn = r1 for non-MOV opcodes.  Rd becomes r0 either way.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
5388
5389 /* Clean up load instructions. */
5390
/* Cleanup for load instructions executed out of line with their operands
   remapped onto r0-r3 (see install_load_store): fetch the loaded value(s)
   and the (possibly written-back) base from the scratch registers,
   restore r0-r3, apply base writeback, then write the result into the
   real destination last -- LOAD_WRITE_PC because Rt may be the PC.  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      arm_displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The copy loaded into r0 (and r1 for doubleword transfers); r2 stood
     in for the base register.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers (r3 was only used for register-offset
     addressing).  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5417
5418 /* Clean up store instructions. */
5419
/* Cleanup for store instructions executed out of line with their operands
   remapped onto scratch registers: restore r0-r4 from the saved values
   and apply base-register writeback.  The stored memory itself needs no
   fixup (except for PC stores, handled elsewhere).  */

static void
cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
	       arm_displaced_step_closure *dsc)
{
  /* r2 stood in for the base register; read it before restoring.  */
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);

  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
  /* r4 is only clobbered when storing the PC (see the modinsn sequence in
     arm_copy_ldr_str_ldrb_strb).  */
  if (!dsc->u.ldst.restore_r4)
    displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);

  /* Writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
}
5439
5440 /* Copy "extra" load/store instructions. These are halfword/doubleword
5441 transfers, which have a different encoding to byte/word transfers. */
5442
/* Copy an ARM "extra" load/store (halfword/doubleword) instruction that
   mentions the PC, remapping Rt/Rt2/Rn/Rm onto r0-r3 so the copy can
   execute out of line.  UNPRIVILEGED selects the unprivileged (ldrt-like)
   variants, which only affects the debug output here.  */

static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Tables indexed by the opcode computed below: whether the variant is a
     load, and its transfer size in bytes (8 = doubleword, using Rt and
     Rt+1).  */
  char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* Mask covers the Rn, Rt and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
			(unsigned long) insn);

  /* Fold op2 and the relevant op1 bits into a table index.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers the rewritten instruction will clobber
     (r3 only for register-offset addressing).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Load the scratch registers with the original operand values.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  /* Record what the cleanup routine needs to undo the remapping.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
5514
5515 /* Copy byte/half word/word loads and stores. */
5516
/* Common setup for byte/halfword/word loads and stores whose operands
   have been remapped onto r0/r2 (and r3 for register-offset addressing)
   by the caller: save the scratch registers, load them with the original
   rt/rn/rm values, record the transfer parameters and register the
   appropriate cleanup routine.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the scratch registers: r3 only when a register offset is used,
     r4 only for stores (it may be clobbered by the PC-store sequence set
     up by the caller; see arm_copy_ldr_str_ldrb_strb).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
                           = addr(Insn1) + offset - addr(Insn3) - 8
                           = offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
                              = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5570
5571
/* Copy a 32-bit Thumb-2 PC-relative (literal) load of SIZE bytes,
   rewriting it as a register-offset load from the (aligned) original PC
   value placed in r2, with the signed immediate placed in r3.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* The U bit selects add vs subtract of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Word-align the PC base, as a literal load uses Align(PC,4).  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5627
/* Copy a 32-bit Thumb-2 load (immediate or register offset) that mentions
   the PC in Rt or Rn, remapping the operands onto r0/r2/r3 so the copy
   can execute out of line.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
5675
5676
/* Copy an ARM byte/word load or store that mentions the PC, remapping the
   operands onto r0/r2/r3 so the copy can execute out of line.  Storing
   the PC requires a special multi-instruction sequence (see below and the
   comment in install_load_store).  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    arm_displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* Mask covers the Rn, Rt and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* This sequence computes "from + offset" into r0 so that the store
	 writes the value the original STR of the PC would have written;
	 see the "To write PC" comment in install_load_store.  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5741
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-indexed addressing bumps the address before each transfer;
     post-indexed after.  Decrementing variants bump downwards.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Incrementing transfers walk the list from r0 upwards, decrementing
     ones from r15 downwards.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  /* Only emulate the transfer if the original condition passes.  */
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the transfer: for each register in the mask (in transfer
     order), read the memory word and write it to the register.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register named in the mask.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5820
5821 /* Clean up an STM which included the PC in the register list. */
5822
5823 static void
5824 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5825 arm_displaced_step_closure *dsc)
5826 {
5827 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5828 int store_executed = condition_true (dsc->u.block.cond, status);
5829 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5830 CORE_ADDR stm_insn_addr;
5831 uint32_t pc_val;
5832 long offset;
5833 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5834
5835 /* If condition code fails, there's nothing else to do. */
5836 if (!store_executed)
5837 return;
5838
5839 if (dsc->u.block.increment)
5840 {
5841 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5842
5843 if (dsc->u.block.before)
5844 pc_stored_at += 4;
5845 }
5846 else
5847 {
5848 pc_stored_at = dsc->u.block.xfer_addr;
5849
5850 if (dsc->u.block.before)
5851 pc_stored_at -= 4;
5852 }
5853
5854 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5855 stm_insn_addr = dsc->scratch_base;
5856 offset = pc_val - stm_insn_addr;
5857
5858 if (debug_displaced)
5859 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5860 "STM instruction\n", offset);
5861
5862 /* Rewrite the stored PC to the proper value for the non-displaced original
5863 instruction. */
5864 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5865 dsc->insn_addr + offset);
5866 }
5867
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* The copy loaded into the contiguous range r0..r(N-1); all of those
     are initially "clobbered" until moved or restored.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downwards, moving each
     loaded value from its temporary home r(num_to_shuffle-1), ...,  r0
     into the register the original instruction named.  Going from high to
     low guarantees the source (read_reg <= write_reg) is never a register
     we have already written.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
5949
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  /* Using the PC as the base register is architecturally unpredictable;
     warn and run the instruction unmodified.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record the decoded transfer parameters for the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten transfer will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  /* Contiguous mask r0..r(N-1) with the same population count.  */
	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6066
/* Thumb-2 counterpart of arm_copy_block_xfer: handle 32-bit ldm/stm that
   mention the PC by rewriting the register list (loads) or fixing up the
   stored PC afterwards (stores).  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  /* Using the PC as the base register is unpredictable; warn and run the
     instruction unmodified.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* Cannot happen: bit 13 was cleared above, so the register mask
	     can never be fully populated.  */
	  gdb_assert (0);
	}
      else
	{
	  /* As in arm_copy_block_xfer: rewrite the list into a contiguous
	     chunk r0...rX, disable writeback, and let the cleanup routine
	     shuffle registers into place and emulate writeback.  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten transfer will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including the PC: run as-is and fix the stored PC value in
	 memory afterwards.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6148
6149 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6150 This is used to avoid a dependency on BFD's bfd_endian enum. */
6151
6152 ULONGEST
6153 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6154 int byte_order)
6155 {
6156 return read_memory_unsigned_integer (memaddr, len,
6157 (enum bfd_endian) byte_order);
6158 }
6159
6160 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6161
6162 CORE_ADDR
6163 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6164 CORE_ADDR val)
6165 {
6166 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6167 }
6168
/* Wrapper over syscall_next_pc for use in get_next_pcs.  This generic
   version has no OS knowledge and always returns 0; OS-specific code
   (e.g. for Linux) is expected to provide its own handling — TODO confirm
   how callers interpret the 0 return.  */

static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  return 0;
}
6176
6177 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6178
6179 int
6180 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6181 {
6182 return arm_is_thumb (self->regcache);
6183 }
6184
6185 /* single_step() is called just before we want to resume the inferior,
6186 if we want to single-step it but there is no hardware or kernel
6187 single-step support. We find the target of the coming instructions
6188 and breakpoint them. */
6189
6190 std::vector<CORE_ADDR>
6191 arm_software_single_step (struct regcache *regcache)
6192 {
6193 struct gdbarch *gdbarch = regcache->arch ();
6194 struct arm_get_next_pcs next_pcs_ctx;
6195
6196 arm_get_next_pcs_ctor (&next_pcs_ctx,
6197 &arm_get_next_pcs_ops,
6198 gdbarch_byte_order (gdbarch),
6199 gdbarch_byte_order_for_code (gdbarch),
6200 0,
6201 regcache);
6202
6203 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6204
6205 for (CORE_ADDR &pc_ref : next_pcs)
6206 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6207
6208 return next_pcs;
6209 }
6210
6211 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6212 for Linux, where some SVC instructions must be treated specially. */
6213
6214 static void
6215 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6216 arm_displaced_step_closure *dsc)
6217 {
6218 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6219
6220 if (debug_displaced)
6221 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6222 "%.8lx\n", (unsigned long) resume_addr);
6223
6224 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6225 }
6226
6227
/* Common copy routine for svc instruction.  Returns the result of the
   OS-specific copier when one is installed, otherwise 0.  */

static int
install_svc (struct gdbarch *gdbarch, struct regcache *regs,
	     arm_displaced_step_closure *dsc)
{
  /* Preparation: none.
     Insn: unmodified svc.
     Cleanup: pc <- insn_addr + insn_size.  */

  /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
     instruction.  */
  dsc->wrote_to_pc = 1;

  /* Allow OS-specific code to override SVC handling.  */
  if (dsc->u.svc.copy_svc_os)
    return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
  else
    {
      dsc->cleanup = &cleanup_svc;
      return 0;
    }
}
6251
/* Copy an ARM-mode SVC instruction for displaced stepping: the insn is
   executed unmodified out of line; install_svc sets up the cleanup.  */

static int
arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
	      struct regcache *regs, arm_displaced_step_closure *dsc)
{

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
			(unsigned long) insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
6265
/* Copy a Thumb-mode SVC instruction for displaced stepping: the insn is
   executed unmodified out of line; install_svc sets up the cleanup.  */

static int
thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
		struct regcache *regs, arm_displaced_step_closure *dsc)
{

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
			insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
6279
/* Copy undefined instructions.  They are simply run unmodified out of
   line; no cleanup is required.  */

static int
arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
		arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying undefined insn %.8lx\n",
			(unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
6295
/* Copy an undefined 32-bit Thumb instruction: both halfwords are run
   unmodified out of line; no cleanup is required.  */

static int
thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			arm_displaced_step_closure *dsc)
{

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
			"%.4x %.4x\n", (unsigned short) insn1,
			(unsigned short) insn2);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}
6312
/* Copy unpredictable instructions.  They are run unmodified out of line;
   no cleanup is required.  */

static int
arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
		 arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
			"%.8lx\n", (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
6327
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode the miscellaneous / memory-hint / Advanced SIMD instruction space
   (op1 = insn[26:20], op2 = insn[7:4], rn = insn[19:16]) and dispatch to
   the appropriate copy routine.  Returns that routine's result.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6390
/* Decode ARM instructions in the unconditional space and dispatch to the
   matching copy routine.  Returns that routine's result.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Whether the base register is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6473
/* Decode miscellaneous instructions in dp/misc encoding space.  Dispatches
   on op2 = insn[6:4] and op = insn[22:21]; returns the selected copy
   routine's result.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6526
/* Decode the data-processing/miscellaneous instruction space.  Bit 25
   selects the immediate forms; otherwise op1 = insn[24:20] and
   op2 = insn[7:4] discriminate the register forms.  Returns the selected
   copy routine's result, or 1 if no pattern matched.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6572
/* Decode the load/store word and unsigned byte instruction space.
   A (bit 25) selects the register-offset form, B is bit 4.  The three
   trailing arguments to arm_copy_ldr_str_ldrb_strb appear to select
   load-vs-store, transfer size in bytes, and the user-mode ('t') variant
   — TODO confirm against that function's definition.  Returns the copy
   routine's result, or 1 if no pattern matched.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6609
/* Decode the media instruction space (parallel add/sub, pack/unpack,
   saturate, reverse, usad8/usada8 and bitfield operations), dispatching
   on insn[24:20].  Returns the copy routine's result, or 1 if no pattern
   matched.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6665
6666 static int
6667 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6668 struct regcache *regs,
6669 arm_displaced_step_closure *dsc)
6670 {
6671 if (bit (insn, 25))
6672 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6673 else
6674 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6675 }
6676
/* Decode the VFP/Neon extension register load/store space, dispatching on
   insn[24:20].  Returns the copy routine's result, or 1 if no pattern
   matched.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6707
6708 /* Decode shifted register instructions. */
6709
6710 static int
6711 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6712 uint16_t insn2, struct regcache *regs,
6713 arm_displaced_step_closure *dsc)
6714 {
6715 /* PC is only allowed to be used in instruction MOV. */
6716
6717 unsigned int op = bits (insn1, 5, 8);
6718 unsigned int rn = bits (insn1, 0, 3);
6719
6720 if (op == 0x2 && rn == 0xf) /* MOV */
6721 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6722 else
6723 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6724 "dp (shift reg)", dsc);
6725 }
6726
6727
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st, but for the 32-bit Thumb encodings (opcode is
   insn1[8:4]).  Returns the copy routine's result, or 1 if no pattern
   matched.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6766
/* Decode the supervisor call / coprocessor instruction space
   (op1 = insn[25:20], op = bit 4, coproc = insn[11:8]) and dispatch to
   the appropriate copy routine.  Returns that routine's result.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  /* Coprocessor 101x is the VFP/Neon register file.  */
  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6811
/* Decode the 32-bit Thumb coprocessor / Advanced SIMD instruction space
   and dispatch to the appropriate copy routine.  Returns that routine's
   result, or 0 for the generic coprocessor path.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x: SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else  /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else  /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6852
6853 static void
6854 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6855 arm_displaced_step_closure *dsc, int rd)
6856 {
6857 /* ADR Rd, #imm
6858
6859 Rewrite as:
6860
6861 Preparation: Rd <- PC
6862 Insn: ADD Rd, #imm
6863 Cleanup: Null.
6864 */
6865
6866 /* Rd <- PC */
6867 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6868 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6869 }
6870
/* Copy a 16-bit Thumb ADR (PC-relative address) instruction: rewritten as
   "ADDS Rd, #imm" with Rd pre-loaded with the PC value.  Always returns
   0.  */

static int
thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
			      arm_displaced_step_closure *dsc,
			      int rd, unsigned int imm)
{

  /* Encoding T2: ADDS Rd, #imm */
  dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6884
6885 static int
6886 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6887 struct regcache *regs,
6888 arm_displaced_step_closure *dsc)
6889 {
6890 unsigned int rd = bits (insn, 8, 10);
6891 unsigned int imm8 = bits (insn, 0, 7);
6892
6893 if (debug_displaced)
6894 fprintf_unfiltered (gdb_stdlog,
6895 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6896 rd, imm8, insn);
6897
6898 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6899 }
6900
/* Copy a 32-bit Thumb ADR (PC-relative address) instruction: rewritten as
   "SUB Rd, Rd, #imm" (encoding T2 of ADR) or "ADD Rd, Rd, #imm"
   (encoding T3), with Rd pre-loaded with the PC value.  Always returns
   0.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since the immediate has the same encoding in ADR's ADD and SUB forms,
     we simply extract the raw immediate encoding rather than computing
     the immediate.  When generating the ADD or SUB instruction, we can
     simply OR the immediate into the new encoding.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* Encoding T2 */
    {
      /* Encoding T2 of ADR is SUB-based; emit SUB Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6937
/* Copy a 16-bit Thumb PC-relative load (LDR literal) for displaced
   stepping.  Always returns 0.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);  /* imm8 is scaled by 4.  */

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;  /* ldr r0, [r2, r3] */

  dsc->cleanup = &cleanup_load;

  return 0;
}
6985
/* Copy Thumb cbnz/cbz instruction.  The register test is evaluated here,
   at copy time; a NOP is executed out of line and cleanup_branch performs
   the branch (or not).  Always returns 0.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);	/* 1 = CBNZ, 0 = CBZ.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
    dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
7024
7025 /* Copy Table Branch Byte/Halfword */
7026 static int
7027 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7028 uint16_t insn2, struct regcache *regs,
7029 arm_displaced_step_closure *dsc)
7030 {
7031 ULONGEST rn_val, rm_val;
7032 int is_tbh = bit (insn2, 4);
7033 CORE_ADDR halfwords = 0;
7034 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7035
7036 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7037 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7038
7039 if (is_tbh)
7040 {
7041 gdb_byte buf[2];
7042
7043 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7044 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7045 }
7046 else
7047 {
7048 gdb_byte buf[1];
7049
7050 target_read_memory (rn_val + rm_val, buf, 1);
7051 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7052 }
7053
7054 if (debug_displaced)
7055 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7056 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7057 (unsigned int) rn_val, (unsigned int) rm_val,
7058 (unsigned int) halfwords);
7059
7060 dsc->u.branch.cond = INST_AL;
7061 dsc->u.branch.link = 0;
7062 dsc->u.branch.exchange = 0;
7063 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7064
7065 dsc->cleanup = &cleanup_branch;
7066
7067 return 0;
7068 }
7069
7070 static void
7071 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7072 arm_displaced_step_closure *dsc)
7073 {
7074 /* PC <- r7 */
7075 int val = displaced_read_reg (regs, dsc, 7);
7076 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7077
7078 /* r7 <- r8 */
7079 val = displaced_read_reg (regs, dsc, 8);
7080 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7081
7082 /* r8 <- tmp[0] */
7083 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7084
7085 }
7086
/* Copy a 16-bit Thumb "POP {..., PC}" instruction for displaced stepping.
   Always returns 0.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full list; see cleanup_pop_pc_16bit_all.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff);  /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;		   /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;		   /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): pop into a contiguous run of low registers (one extra
	 for the PC value) and let cleanup_block_load_pc distribute the
	 values.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
			    "{..., pc}: original reg list %.4x,"
			    " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record that the original list included the PC, for the cleanup
	 routine's benefit.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7159
/* Decode a displaced 16-bit Thumb instruction INSN1 and dispatch it to
   the matching copy routine, which prepares a (possibly rewritten) copy
   of the instruction in DSC for execution in the scratch area.
   PC-relative instructions need rewriting or cleanup hooks; everything
   else is copied unmodified.  Calls internal_error if the opcode cannot
   be decoded.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address (ADR) */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7262
/* Decode the 32-bit Thumb-2 "load byte/halfword/word and memory hints"
   group (halfwords INSN1/INSN2) for displaced stepping.  Literal
   (PC-relative) loads and preloads must be rewritten; everything else is
   copied unmodified.  Returns the result of the chosen copy routine.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf) /* LDRH/LDRSH (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf) /* LDR (literal) */
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7342
/* Decode a displaced 32-bit Thumb-2 instruction (halfwords INSN1/INSN2)
   and dispatch it to the matching copy routine, which prepares the
   scratch copy in DSC.  Calls internal_error if the opcode cannot be
   decoded.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to be used in load/store
		     {dual, exclusive} instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR/ADD/SUB with Rn == PC is PC-relative and needs
		 rewriting; everything else can run in place.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7470
7471 static void
7472 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7473 struct regcache *regs,
7474 arm_displaced_step_closure *dsc)
7475 {
7476 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7477 uint16_t insn1
7478 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7479
7480 if (debug_displaced)
7481 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7482 "at %.8lx\n", insn1, (unsigned long) from);
7483
7484 dsc->is_thumb = 1;
7485 dsc->insn_size = thumb_insn_size (insn1);
7486 if (thumb_insn_size (insn1) == 4)
7487 {
7488 uint16_t insn2
7489 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7490 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7491 }
7492 else
7493 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7494 }
7495
/* Top-level decoder for displaced stepping: prepare DSC for the
   instruction at FROM, to be executed in the scratch space at TO.
   Thumb-mode instructions are delegated to thumb_process_displaced_insn;
   ARM-mode instructions are dispatched on their major opcode fields.
   Calls internal_error if the instruction cannot be decoded.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Condition field 0xf selects the unconditional instruction space.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  /* Dispatch key: instruction bits 25-27 shifted into bits 1-3, with
     instruction bit 4 in bit 0.  */
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7553
/* Actually set up the scratch space for a displaced instruction.  Write
   the (possibly modified) instruction copies recorded in DSC to the
   scratch area at TO, then append the architecture's breakpoint
   instruction so the inferior stops after the step.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb copies are emitted as 16-bit units, ARM as 32-bit words.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
7609
7610 /* Entry point for cleaning things up after a displaced instruction has been
7611 single-stepped. */
7612
7613 void
7614 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7615 struct displaced_step_closure *dsc_,
7616 CORE_ADDR from, CORE_ADDR to,
7617 struct regcache *regs)
7618 {
7619 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7620
7621 if (dsc->cleanup)
7622 dsc->cleanup (gdbarch, regs, dsc);
7623
7624 if (!dsc->wrote_to_pc)
7625 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7626 dsc->insn_addr + dsc->insn_size);
7627
7628 }
7629
7630 #include "bfd-in2.h"
7631 #include "libcoff.h"
7632
/* Disassembler callback: print the instruction at MEMADDR.  When the
   address is a Thumb location, plant a fake COFF Thumb symbol in the
   disassemble_info so that opcodes' ARM disassembler switches to Thumb
   decoding.  Returns the length printed (from default_print_insn).  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Static so the fake symbol is built only once and then reused for
	 every subsequent Thumb disassembly.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7681
7682 /* The following define instruction sequences that will cause ARM
7683 cpu's to take an undefined instruction trap. These are used to
7684 signal a breakpoint to GDB.
7685
7686 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7687 modes. A different instruction is required for each mode. The ARM
7688 cpu's can also be big or little endian. Thus four different
7689 instructions are needed to support all cases.
7690
7691 Note: ARMv4 defines several new instructions that will take the
7692 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7693 not in fact add the new instructions. The new undefined
7694 instructions in ARMv4 are all instructions that had no defined
7695 behaviour in earlier chips. There is no guarantee that they will
7696 raise an exception, but may be treated as NOP's. In practice, it
7697 may only safe to rely on instructions matching:
7698
7699 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7700 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7701 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7702
   Even this may only be true if the condition predicate is true.  The
7704 following use a condition predicate of ALWAYS so it is always TRUE.
7705
7706 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7707 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
7709 abi-specific code during establishment of the gdbarch vector. */
7710
/* Byte sequences for the ARM and Thumb breakpoint instructions described
   in the comment above, in little- and big-endian order.  (The Thumb
   sequence happens to be the same bytes in either endianness.)  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint sequences; OS ABI code may override these in the
   gdbarch tdep.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7720
7721 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7722
7723 static int
7724 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7725 {
7726 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7727 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7728
7729 if (arm_pc_is_thumb (gdbarch, *pcptr))
7730 {
7731 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7732
7733 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7734 check whether we are replacing a 32-bit instruction. */
7735 if (tdep->thumb2_breakpoint != NULL)
7736 {
7737 gdb_byte buf[2];
7738
7739 if (target_read_memory (*pcptr, buf, 2) == 0)
7740 {
7741 unsigned short inst1;
7742
7743 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7744 if (thumb_insn_size (inst1) == 4)
7745 return ARM_BP_KIND_THUMB2;
7746 }
7747 }
7748
7749 return ARM_BP_KIND_THUMB;
7750 }
7751 else
7752 return ARM_BP_KIND_ARM;
7753
7754 }
7755
7756 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7757
7758 static const gdb_byte *
7759 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7760 {
7761 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7762
7763 switch (kind)
7764 {
7765 case ARM_BP_KIND_ARM:
7766 *size = tdep->arm_breakpoint_size;
7767 return tdep->arm_breakpoint;
7768 case ARM_BP_KIND_THUMB:
7769 *size = tdep->thumb_breakpoint_size;
7770 return tdep->thumb_breakpoint;
7771 case ARM_BP_KIND_THUMB2:
7772 *size = tdep->thumb2_breakpoint_size;
7773 return tdep->thumb2_breakpoint;
7774 default:
7775 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7776 }
7777 }
7778
/* Implement the breakpoint_kind_from_current_state gdbarch method.
   Determine the breakpoint kind to place at *PCPTR by computing the set
   of possible next PCs from REGCACHE's current state: if *PCPTR is one
   of them, its Thumb bit tells us the mode at that point; otherwise fall
   back to classifying *PCPTR by address alone.  */

static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  /* Not a known next PC (or memory unreadable): classify by address.  */
  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
7822
/* Extract from REGS, a regcache holding the (raw) register state, a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */
7826
static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      /* Where a float lands depends on the floating-point model in use.  */
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float: value is in the core registers, r0 (and r1 for
	     doubles).  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  /* Only the trailing partial word is copied short.  */
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
7914
7915
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in a register.  */
      if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
	return 0;

      return 1;
    }
  else
    {
      /* APCS: apply the legacy "integer-like" rules below.  */
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory.  */
      if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;
	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type.  */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?
	     --> yes, nRc = 1
	  */

	  for (i = 0; i < TYPE_NFIELDS (type); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
							     i)));

	      /* Is it a floating point type field?  */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
8031
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[FP_REGISTER_SIZE];

      /* Destination registers depend on the floating-point model.  */
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* Convert to F0's internal FPA format before writing.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float: r0 (and r1 for doubles).  */
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
8123
8124
/* Handle function return values.

   Implementation of the gdbarch "return_value" method for ARM.
   Determine the convention used to return a value of type VALTYPE
   from FUNCTION and, when READBUF/WRITEBUF are non-NULL, transfer the
   value between REGCACHE and those buffers.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* Under the VFP ABI, VFP co-processor register candidates are
     returned directly in consecutive VFP registers.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are transferred via two raw double
		 registers; there is no user register named q<i>
		 guaranteed to exist here.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Map the register name (s<i> or d<i>) to its number.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      /* Aggregates may have to be returned in memory, depending on
	 the selected struct-return convention and the type itself.  */
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* Everything else is returned in core registers.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8194
8195
8196 static int
8197 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8198 {
8199 struct gdbarch *gdbarch = get_frame_arch (frame);
8200 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8201 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8202 CORE_ADDR jb_addr;
8203 gdb_byte buf[INT_REGISTER_SIZE];
8204
8205 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8206
8207 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8208 INT_REGISTER_SIZE))
8209 return 0;
8210
8211 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8212 return 1;
8213 }
8214
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
	 target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* Every entry in TABLE is two characters long, so the suffix is
	 always the last two characters of the name.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Strip the leading "__" and the "_from_*" suffix to recover
	 the name of the call target.  */
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Prefer a symbol from the same objfile as the stub, if one is
	 known.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  return 0;			/* not a stub */
}
8295
8296 static void
8297 set_arm_command (const char *args, int from_tty)
8298 {
8299 printf_unfiltered (_("\
8300 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8301 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8302 }
8303
/* Implement the "show arm" prefix command: display the values of all
   "show arm" subcommands.  */

static void
show_arm_command (const char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8309
8310 static void
8311 arm_update_current_architecture (void)
8312 {
8313 struct gdbarch_info info;
8314
8315 /* If the current architecture is not ARM, we have nothing to do. */
8316 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8317 return;
8318
8319 /* Update the architecture. */
8320 gdbarch_info_init (&info);
8321
8322 if (!gdbarch_update_p (info))
8323 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8324 }
8325
8326 static void
8327 set_fp_model_sfunc (const char *args, int from_tty,
8328 struct cmd_list_element *c)
8329 {
8330 int fp_model;
8331
8332 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8333 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8334 {
8335 arm_fp_model = (enum arm_float_model) fp_model;
8336 break;
8337 }
8338
8339 if (fp_model == ARM_FLOAT_LAST)
8340 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8341 current_fp_model);
8342
8343 arm_update_current_architecture ();
8344 }
8345
/* Show the current ARM floating point model.  When the setting is
   "auto" and the current architecture is ARM, also report the model
   that was actually selected.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
8362
8363 static void
8364 arm_set_abi (const char *args, int from_tty,
8365 struct cmd_list_element *c)
8366 {
8367 int arm_abi;
8368
8369 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8370 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8371 {
8372 arm_abi_global = (enum arm_abi_kind) arm_abi;
8373 break;
8374 }
8375
8376 if (arm_abi == ARM_ABI_LAST)
8377 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8378 arm_abi_string);
8379
8380 arm_update_current_architecture ();
8381 }
8382
/* Show the current ARM ABI setting.  When the setting is "auto" and
   the current architecture is ARM, also report the ABI that was
   actually selected.  */

static void
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
		      arm_abi_string);
}
8398
/* Show the execution mode (ARM or Thumb) that is assumed when symbol
   information is unavailable.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
8408
/* Show the execution mode (ARM or Thumb) that is assumed even when
   symbol information is available.  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
8418
8419 /* If the user changes the register disassembly style used for info
8420 register and other commands, we have to also switch the style used
8421 in opcodes for disassembly output. This function is run in the "set
8422 arm disassembly" command, and does that. */
8423
8424 static void
8425 set_disassembly_style_sfunc (const char *args, int from_tty,
8426 struct cmd_list_element *c)
8427 {
8428 /* Convert the short style name into the long style name (eg, reg-names-*)
8429 before calling the generic set_disassembler_options() function. */
8430 std::string long_name = std::string ("reg-names-") + disassembly_style;
8431 set_disassembler_options (&long_name[0]);
8432 }
8433
/* Show the current register-naming style used in disassembly, by
   looking for a "reg-names-*" entry among the active disassembler
   options.  */

static void
show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
			      struct cmd_list_element *c, const char *value)
{
  struct gdbarch *gdbarch = get_current_arch ();
  char *options = get_disassembler_options (gdbarch);
  const char *style = "";
  int len = 0;
  const char *opt;

  /* If several "reg-names-" options are present, the last one wins;
     the style name extends to the next ',' or the end of string.  */
  FOR_EACH_DISASSEMBLER_OPTION (opt, options)
    if (CONST_STRNEQ (opt, "reg-names-"))
      {
	style = &opt[strlen ("reg-names-")];
	len = strcspn (style, ",");
      }

  fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
}
8453 \f
/* Return the ARM register name corresponding to register I.  */
static const char *
arm_register_name (struct gdbarch *gdbarch, int i)
{
  const int num_regs = gdbarch_num_regs (gdbarch);

  /* The single-precision VFP pseudo registers occupy the first 32
     numbers after the raw registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && i >= num_regs && i < num_regs + 32)
    {
      static const char *const vfp_pseudo_names[] = {
	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
      };

      return vfp_pseudo_names[i - num_regs];
    }

  /* The NEON quad pseudo registers follow the VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && i >= num_regs + 32 && i < num_regs + 32 + 16)
    {
      static const char *const neon_pseudo_names[] = {
	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
      };

      return neon_pseudo_names[i - num_regs - 32];
    }

  if (i >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return "";

  return arm_register_names[i];
}
8491
8492 /* Test whether the coff symbol specific value corresponds to a Thumb
8493 function. */
8494
8495 static int
8496 coff_sym_is_thumb (int val)
8497 {
8498 return (val == C_THUMBEXT
8499 || val == C_THUMBSTAT
8500 || val == C_THUMBEXTFUNC
8501 || val == C_THUMBSTATFUNC
8502 || val == C_THUMBLABEL);
8503 }
8504
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  /* The symbol's target-internal branch type records whether a branch
     to this symbol should switch to Thumb state.  */
  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
8521
/* Mark MSYM as special (Thumb) when the COFF symbol specific value
   VAL denotes a Thumb symbol.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
8528
/* Record an ARM mapping symbol in OBJFILE's per-section map.  Per the
   ARM ELF specification, $a, $t and $d mark the start of ARM code,
   Thumb code and data respectively; any other '$'-prefixed symbol is
   ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Create the per-objfile data on first use; it holds one vector of
     mapping symbols per BFD section.  */
  data = arm_objfile_data_key.get (objfile);
  if (data == NULL)
    data = arm_objfile_data_key.emplace (objfile,
					 objfile->obfd->section_count);
  arm_mapping_symbol_vec &map
    = data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Insert at the end, the vector will be sorted on first use.  */
  map.push_back (new_map_sym);
}
8554
8555 static void
8556 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8557 {
8558 struct gdbarch *gdbarch = regcache->arch ();
8559 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8560
8561 /* If necessary, set the T bit. */
8562 if (arm_apcs_32)
8563 {
8564 ULONGEST val, t_bit;
8565 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8566 t_bit = arm_psr_thumb_bit (gdbarch);
8567 if (arm_pc_is_thumb (gdbarch, pc))
8568 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8569 val | t_bit);
8570 else
8571 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8572 val & ~t_bit);
8573 }
8574 }
8575
/* Read the contents of a NEON quad register, by reading from two
   double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index of
   the quad register, in [0, 15].  */

static enum register_status
arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
		    int regnum, gdb_byte *buf)
{
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;
  enum register_status status;

  /* Quad register q<N> is backed by double registers d<2N> and
     d<2N+1>; look up the first one by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  status = regcache->raw_read (double_regnum, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  /* The other double register fills the other half of BUF.  */
  offset = 8 - offset;
  status = regcache->raw_read (double_regnum + 1, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  return REG_VALID;
}
8615
/* Read an ARM pseudo register into BUF.  Relative to the raw register
   count, numbers 0-31 are the single-precision registers s0-s31 and,
   when NEON pseudos are available, 32-47 are the quad registers
   q0-q15; both are synthesized from the raw double registers.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* s<N> lives in one half of d<N/2>; look the latter up by
	 name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache->raw_read (double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
8654
/* Store the contents of BUF to a NEON quad register, by writing to
   two double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index
   of the quad register, in [0, 15].  */

static void
arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
		     int regnum, const gdb_byte *buf)
{
  char name_buf[4];
  int offset, double_regnum;

  /* Quad register q<N> is backed by double registers d<2N> and
     d<2N+1>; look up the first one by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  /* Each half of BUF goes to one of the two double registers.  */
  regcache->raw_write (double_regnum, buf + offset);
  offset = 8 - offset;
  regcache->raw_write (double_regnum + 1, buf + offset);
}
8683
/* Write BUF to an ARM pseudo register.  Relative to the raw register
   count, numbers 0-31 are the single-precision registers s0-s31 and,
   when NEON pseudos are available, 32-47 are the quad registers
   q0-q15; both are backed by the raw double registers.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* s<N> lives in one half of d<N/2>; look the latter up by
	 name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write: only the addressed half of the double
	 register is replaced.  */
      regcache->raw_read (double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache->raw_write (double_regnum, reg_buf);
    }
}
8719
/* Return the value, in FRAME, of the register whose number BATON
   points to.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;
  return value_of_register (*reg_p, frame);
}
8726 \f
8727 static enum gdb_osabi
8728 arm_elf_osabi_sniffer (bfd *abfd)
8729 {
8730 unsigned int elfosabi;
8731 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8732
8733 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8734
8735 if (elfosabi == ELFOSABI_ARM)
8736 /* GNU tools use this value. Check note sections in this case,
8737 as well. */
8738 bfd_map_over_sections (abfd,
8739 generic_elf_osabi_sniff_abi_tag_sections,
8740 &osabi);
8741
8742 /* Anything else will be handled by the generic ELF sniffer. */
8743 return osabi;
8744 }
8745
8746 static int
8747 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8748 struct reggroup *group)
8749 {
8750 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8751 this, FPS register belongs to save_regroup, restore_reggroup, and
8752 all_reggroup, of course. */
8753 if (regnum == ARM_FPS_REGNUM)
8754 return (group == float_reggroup
8755 || group == save_reggroup
8756 || group == restore_reggroup
8757 || group == all_reggroup);
8758 else
8759 return default_register_reggroup_p (gdbarch, regnum, group);
8760 }
8761
8762 \f
8763 /* For backward-compatibility we allow two 'g' packet lengths with
8764 the remote protocol depending on whether FPA registers are
8765 supplied. M-profile targets do not have FPA registers, but some
8766 stubs already exist in the wild which use a 'g' packet which
8767 supplies them albeit with dummy values. The packet format which
8768 includes FPA registers should be considered deprecated for
8769 M-profile targets. */
8770
8771 static void
8772 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8773 {
8774 if (gdbarch_tdep (gdbarch)->is_m)
8775 {
8776 /* If we know from the executable this is an M-profile target,
8777 cater for remote targets whose register set layout is the
8778 same as the FPA layout. */
8779 register_remote_g_packet_guess (gdbarch,
8780 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8781 (16 * INT_REGISTER_SIZE)
8782 + (8 * FP_REGISTER_SIZE)
8783 + (2 * INT_REGISTER_SIZE),
8784 tdesc_arm_with_m_fpa_layout);
8785
8786 /* The regular M-profile layout. */
8787 register_remote_g_packet_guess (gdbarch,
8788 /* r0-r12,sp,lr,pc; xpsr */
8789 (16 * INT_REGISTER_SIZE)
8790 + INT_REGISTER_SIZE,
8791 tdesc_arm_with_m);
8792
8793 /* M-profile plus M4F VFP. */
8794 register_remote_g_packet_guess (gdbarch,
8795 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8796 (16 * INT_REGISTER_SIZE)
8797 + (16 * VFP_REGISTER_SIZE)
8798 + (2 * INT_REGISTER_SIZE),
8799 tdesc_arm_with_m_vfp_d16);
8800 }
8801
8802 /* Otherwise we don't have a useful guess. */
8803 }
8804
8805 /* Implement the code_of_frame_writable gdbarch method. */
8806
8807 static int
8808 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8809 {
8810 if (gdbarch_tdep (gdbarch)->is_m
8811 && get_frame_type (frame) == SIGTRAMP_FRAME)
8812 {
8813 /* M-profile exception frames return to some magic PCs, where
8814 isn't writable at all. */
8815 return 0;
8816 }
8817 else
8818 return 1;
8819 }
8820
8821 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8822 to be postfixed by a version (eg armv7hl). */
8823
8824 static const char *
8825 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8826 {
8827 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8828 return "arm(v[^- ]*)?";
8829 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8830 }
8831
8832 /* Initialize the current architecture based on INFO. If possible,
8833 re-use an architecture from ARCHES, which is a list of
8834 architectures already created during this debugging session.
8835
8836 Called e.g. at program startup, when reading a core file, and when
8837 reading a binary file. */
8838
8839 static struct gdbarch *
8840 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8841 {
8842 struct gdbarch_tdep *tdep;
8843 struct gdbarch *gdbarch;
8844 struct gdbarch_list *best_arch;
8845 enum arm_abi_kind arm_abi = arm_abi_global;
8846 enum arm_float_model fp_model = arm_fp_model;
8847 struct tdesc_arch_data *tdesc_data = NULL;
8848 int i, is_m = 0;
8849 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8850 int have_wmmx_registers = 0;
8851 int have_neon = 0;
8852 int have_fpa_registers = 1;
8853 const struct target_desc *tdesc = info.target_desc;
8854
8855 /* If we have an object to base this architecture on, try to determine
8856 its ABI. */
8857
8858 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8859 {
8860 int ei_osabi, e_flags;
8861
8862 switch (bfd_get_flavour (info.abfd))
8863 {
8864 case bfd_target_coff_flavour:
8865 /* Assume it's an old APCS-style ABI. */
8866 /* XXX WinCE? */
8867 arm_abi = ARM_ABI_APCS;
8868 break;
8869
8870 case bfd_target_elf_flavour:
8871 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8872 e_flags = elf_elfheader (info.abfd)->e_flags;
8873
8874 if (ei_osabi == ELFOSABI_ARM)
8875 {
8876 /* GNU tools used to use this value, but do not for EABI
8877 objects. There's nowhere to tag an EABI version
8878 anyway, so assume APCS. */
8879 arm_abi = ARM_ABI_APCS;
8880 }
8881 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8882 {
8883 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8884
8885 switch (eabi_ver)
8886 {
8887 case EF_ARM_EABI_UNKNOWN:
8888 /* Assume GNU tools. */
8889 arm_abi = ARM_ABI_APCS;
8890 break;
8891
8892 case EF_ARM_EABI_VER4:
8893 case EF_ARM_EABI_VER5:
8894 arm_abi = ARM_ABI_AAPCS;
8895 /* EABI binaries default to VFP float ordering.
8896 They may also contain build attributes that can
8897 be used to identify if the VFP argument-passing
8898 ABI is in use. */
8899 if (fp_model == ARM_FLOAT_AUTO)
8900 {
8901 #ifdef HAVE_ELF
8902 switch (bfd_elf_get_obj_attr_int (info.abfd,
8903 OBJ_ATTR_PROC,
8904 Tag_ABI_VFP_args))
8905 {
8906 case AEABI_VFP_args_base:
8907 /* "The user intended FP parameter/result
8908 passing to conform to AAPCS, base
8909 variant". */
8910 fp_model = ARM_FLOAT_SOFT_VFP;
8911 break;
8912 case AEABI_VFP_args_vfp:
8913 /* "The user intended FP parameter/result
8914 passing to conform to AAPCS, VFP
8915 variant". */
8916 fp_model = ARM_FLOAT_VFP;
8917 break;
8918 case AEABI_VFP_args_toolchain:
8919 /* "The user intended FP parameter/result
8920 passing to conform to tool chain-specific
8921 conventions" - we don't know any such
8922 conventions, so leave it as "auto". */
8923 break;
8924 case AEABI_VFP_args_compatible:
8925 /* "Code is compatible with both the base
8926 and VFP variants; the user did not permit
8927 non-variadic functions to pass FP
8928 parameters/results" - leave it as
8929 "auto". */
8930 break;
8931 default:
8932 /* Attribute value not mentioned in the
8933 November 2012 ABI, so leave it as
8934 "auto". */
8935 break;
8936 }
8937 #else
8938 fp_model = ARM_FLOAT_SOFT_VFP;
8939 #endif
8940 }
8941 break;
8942
8943 default:
8944 /* Leave it as "auto". */
8945 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8946 break;
8947 }
8948
8949 #ifdef HAVE_ELF
8950 /* Detect M-profile programs. This only works if the
8951 executable file includes build attributes; GCC does
8952 copy them to the executable, but e.g. RealView does
8953 not. */
8954 int attr_arch
8955 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8956 Tag_CPU_arch);
8957 int attr_profile
8958 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8959 Tag_CPU_arch_profile);
8960
8961 /* GCC specifies the profile for v6-M; RealView only
8962 specifies the profile for architectures starting with
8963 V7 (as opposed to architectures with a tag
8964 numerically greater than TAG_CPU_ARCH_V7). */
8965 if (!tdesc_has_registers (tdesc)
8966 && (attr_arch == TAG_CPU_ARCH_V6_M
8967 || attr_arch == TAG_CPU_ARCH_V6S_M
8968 || attr_profile == 'M'))
8969 is_m = 1;
8970 #endif
8971 }
8972
8973 if (fp_model == ARM_FLOAT_AUTO)
8974 {
8975 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8976 {
8977 case 0:
8978 /* Leave it as "auto". Strictly speaking this case
8979 means FPA, but almost nobody uses that now, and
8980 many toolchains fail to set the appropriate bits
8981 for the floating-point model they use. */
8982 break;
8983 case EF_ARM_SOFT_FLOAT:
8984 fp_model = ARM_FLOAT_SOFT_FPA;
8985 break;
8986 case EF_ARM_VFP_FLOAT:
8987 fp_model = ARM_FLOAT_VFP;
8988 break;
8989 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8990 fp_model = ARM_FLOAT_SOFT_VFP;
8991 break;
8992 }
8993 }
8994
8995 if (e_flags & EF_ARM_BE8)
8996 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8997
8998 break;
8999
9000 default:
9001 /* Leave it as "auto". */
9002 break;
9003 }
9004 }
9005
9006 /* Check any target description for validity. */
9007 if (tdesc_has_registers (tdesc))
9008 {
9009 /* For most registers we require GDB's default names; but also allow
9010 the numeric names for sp / lr / pc, as a convenience. */
9011 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9012 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9013 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9014
9015 const struct tdesc_feature *feature;
9016 int valid_p;
9017
9018 feature = tdesc_find_feature (tdesc,
9019 "org.gnu.gdb.arm.core");
9020 if (feature == NULL)
9021 {
9022 feature = tdesc_find_feature (tdesc,
9023 "org.gnu.gdb.arm.m-profile");
9024 if (feature == NULL)
9025 return NULL;
9026 else
9027 is_m = 1;
9028 }
9029
9030 tdesc_data = tdesc_data_alloc ();
9031
9032 valid_p = 1;
9033 for (i = 0; i < ARM_SP_REGNUM; i++)
9034 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9035 arm_register_names[i]);
9036 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9037 ARM_SP_REGNUM,
9038 arm_sp_names);
9039 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9040 ARM_LR_REGNUM,
9041 arm_lr_names);
9042 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9043 ARM_PC_REGNUM,
9044 arm_pc_names);
9045 if (is_m)
9046 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9047 ARM_PS_REGNUM, "xpsr");
9048 else
9049 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9050 ARM_PS_REGNUM, "cpsr");
9051
9052 if (!valid_p)
9053 {
9054 tdesc_data_cleanup (tdesc_data);
9055 return NULL;
9056 }
9057
9058 feature = tdesc_find_feature (tdesc,
9059 "org.gnu.gdb.arm.fpa");
9060 if (feature != NULL)
9061 {
9062 valid_p = 1;
9063 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9064 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9065 arm_register_names[i]);
9066 if (!valid_p)
9067 {
9068 tdesc_data_cleanup (tdesc_data);
9069 return NULL;
9070 }
9071 }
9072 else
9073 have_fpa_registers = 0;
9074
9075 feature = tdesc_find_feature (tdesc,
9076 "org.gnu.gdb.xscale.iwmmxt");
9077 if (feature != NULL)
9078 {
9079 static const char *const iwmmxt_names[] = {
9080 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9081 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9082 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9083 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9084 };
9085
9086 valid_p = 1;
9087 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9088 valid_p
9089 &= tdesc_numbered_register (feature, tdesc_data, i,
9090 iwmmxt_names[i - ARM_WR0_REGNUM]);
9091
9092 /* Check for the control registers, but do not fail if they
9093 are missing. */
9094 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9095 tdesc_numbered_register (feature, tdesc_data, i,
9096 iwmmxt_names[i - ARM_WR0_REGNUM]);
9097
9098 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9099 valid_p
9100 &= tdesc_numbered_register (feature, tdesc_data, i,
9101 iwmmxt_names[i - ARM_WR0_REGNUM]);
9102
9103 if (!valid_p)
9104 {
9105 tdesc_data_cleanup (tdesc_data);
9106 return NULL;
9107 }
9108
9109 have_wmmx_registers = 1;
9110 }
9111
9112 /* If we have a VFP unit, check whether the single precision registers
9113 are present. If not, then we will synthesize them as pseudo
9114 registers. */
9115 feature = tdesc_find_feature (tdesc,
9116 "org.gnu.gdb.arm.vfp");
9117 if (feature != NULL)
9118 {
9119 static const char *const vfp_double_names[] = {
9120 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9121 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9122 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9123 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9124 };
9125
9126 /* Require the double precision registers. There must be either
9127 16 or 32. */
9128 valid_p = 1;
9129 for (i = 0; i < 32; i++)
9130 {
9131 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9132 ARM_D0_REGNUM + i,
9133 vfp_double_names[i]);
9134 if (!valid_p)
9135 break;
9136 }
9137 if (!valid_p && i == 16)
9138 valid_p = 1;
9139
9140 /* Also require FPSCR. */
9141 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9142 ARM_FPSCR_REGNUM, "fpscr");
9143 if (!valid_p)
9144 {
9145 tdesc_data_cleanup (tdesc_data);
9146 return NULL;
9147 }
9148
9149 if (tdesc_unnumbered_register (feature, "s0") == 0)
9150 have_vfp_pseudos = 1;
9151
9152 vfp_register_count = i;
9153
9154 /* If we have VFP, also check for NEON. The architecture allows
9155 NEON without VFP (integer vector operations only), but GDB
9156 does not support that. */
9157 feature = tdesc_find_feature (tdesc,
9158 "org.gnu.gdb.arm.neon");
9159 if (feature != NULL)
9160 {
9161 /* NEON requires 32 double-precision registers. */
9162 if (i != 32)
9163 {
9164 tdesc_data_cleanup (tdesc_data);
9165 return NULL;
9166 }
9167
9168 /* If there are quad registers defined by the stub, use
9169 their type; otherwise (normally) provide them with
9170 the default type. */
9171 if (tdesc_unnumbered_register (feature, "q0") == 0)
9172 have_neon_pseudos = 1;
9173
9174 have_neon = 1;
9175 }
9176 }
9177 }
9178
9179 /* If there is already a candidate, use it. */
9180 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9181 best_arch != NULL;
9182 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9183 {
9184 if (arm_abi != ARM_ABI_AUTO
9185 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9186 continue;
9187
9188 if (fp_model != ARM_FLOAT_AUTO
9189 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9190 continue;
9191
9192 /* There are various other properties in tdep that we do not
9193 need to check here: those derived from a target description,
9194 since gdbarches with a different target description are
9195 automatically disqualified. */
9196
9197 /* Do check is_m, though, since it might come from the binary. */
9198 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9199 continue;
9200
9201 /* Found a match. */
9202 break;
9203 }
9204
9205 if (best_arch != NULL)
9206 {
9207 if (tdesc_data != NULL)
9208 tdesc_data_cleanup (tdesc_data);
9209 return best_arch->gdbarch;
9210 }
9211
9212 tdep = XCNEW (struct gdbarch_tdep);
9213 gdbarch = gdbarch_alloc (&info, tdep);
9214
9215 /* Record additional information about the architecture we are defining.
9216 These are gdbarch discriminators, like the OSABI. */
9217 tdep->arm_abi = arm_abi;
9218 tdep->fp_model = fp_model;
9219 tdep->is_m = is_m;
9220 tdep->have_fpa_registers = have_fpa_registers;
9221 tdep->have_wmmx_registers = have_wmmx_registers;
9222 gdb_assert (vfp_register_count == 0
9223 || vfp_register_count == 16
9224 || vfp_register_count == 32);
9225 tdep->vfp_register_count = vfp_register_count;
9226 tdep->have_vfp_pseudos = have_vfp_pseudos;
9227 tdep->have_neon_pseudos = have_neon_pseudos;
9228 tdep->have_neon = have_neon;
9229
9230 arm_register_g_packet_guesses (gdbarch);
9231
9232 /* Breakpoints. */
9233 switch (info.byte_order_for_code)
9234 {
9235 case BFD_ENDIAN_BIG:
9236 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9237 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9238 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9239 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9240
9241 break;
9242
9243 case BFD_ENDIAN_LITTLE:
9244 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9245 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9246 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9247 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9248
9249 break;
9250
9251 default:
9252 internal_error (__FILE__, __LINE__,
9253 _("arm_gdbarch_init: bad byte order for float format"));
9254 }
9255
9256 /* On ARM targets char defaults to unsigned. */
9257 set_gdbarch_char_signed (gdbarch, 0);
9258
9259 /* wchar_t is unsigned under the AAPCS. */
9260 if (tdep->arm_abi == ARM_ABI_AAPCS)
9261 set_gdbarch_wchar_signed (gdbarch, 0);
9262 else
9263 set_gdbarch_wchar_signed (gdbarch, 1);
9264
9265 /* Compute type alignment. */
9266 set_gdbarch_type_align (gdbarch, arm_type_align);
9267
9268 /* Note: for displaced stepping, this includes the breakpoint, and one word
9269 of additional scratch space. This setting isn't used for anything beside
9270 displaced stepping at present. */
9271 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9272
9273 /* This should be low enough for everything. */
9274 tdep->lowest_pc = 0x20;
9275 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9276
9277 /* The default, for both APCS and AAPCS, is to return small
9278 structures in registers. */
9279 tdep->struct_return = reg_struct_return;
9280
9281 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9282 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9283
9284 if (is_m)
9285 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9286
9287 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9288
9289 frame_base_set_default (gdbarch, &arm_normal_base);
9290
9291 /* Address manipulation. */
9292 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9293
9294 /* Advance PC across function entry code. */
9295 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9296
9297 /* Detect whether PC is at a point where the stack has been destroyed. */
9298 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9299
9300 /* Skip trampolines. */
9301 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9302
9303 /* The stack grows downward. */
9304 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9305
9306 /* Breakpoint manipulation. */
9307 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9308 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9309 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9310 arm_breakpoint_kind_from_current_state);
9311
9312 /* Information about registers, etc. */
9313 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9314 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9315 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9316 set_gdbarch_register_type (gdbarch, arm_register_type);
9317 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9318
9319 /* This "info float" is FPA-specific. Use the generic version if we
9320 do not have FPA. */
9321 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9322 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9323
9324 /* Internal <-> external register number maps. */
9325 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9326 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9327
9328 set_gdbarch_register_name (gdbarch, arm_register_name);
9329
9330 /* Returning results. */
9331 set_gdbarch_return_value (gdbarch, arm_return_value);
9332
9333 /* Disassembly. */
9334 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9335
9336 /* Minsymbol frobbing. */
9337 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9338 set_gdbarch_coff_make_msymbol_special (gdbarch,
9339 arm_coff_make_msymbol_special);
9340 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9341
9342 /* Thumb-2 IT block support. */
9343 set_gdbarch_adjust_breakpoint_address (gdbarch,
9344 arm_adjust_breakpoint_address);
9345
9346 /* Virtual tables. */
9347 set_gdbarch_vbit_in_delta (gdbarch, 1);
9348
9349 /* Hook in the ABI-specific overrides, if they have been registered. */
9350 gdbarch_init_osabi (info, gdbarch);
9351
9352 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9353
9354 /* Add some default predicates. */
9355 if (is_m)
9356 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9357 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9358 dwarf2_append_unwinders (gdbarch);
9359 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9360 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9361 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9362
9363 /* Now we have tuned the configuration, set a few final things,
9364 based on what the OS ABI has told us. */
9365
9366 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9367 binaries are always marked. */
9368 if (tdep->arm_abi == ARM_ABI_AUTO)
9369 tdep->arm_abi = ARM_ABI_APCS;
9370
9371 /* Watchpoints are not steppable. */
9372 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9373
9374 /* We used to default to FPA for generic ARM, but almost nobody
9375 uses that now, and we now provide a way for the user to force
9376 the model. So default to the most useful variant. */
9377 if (tdep->fp_model == ARM_FLOAT_AUTO)
9378 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9379
9380 if (tdep->jb_pc >= 0)
9381 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9382
9383 /* Floating point sizes and format. */
9384 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9385 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9386 {
9387 set_gdbarch_double_format
9388 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9389 set_gdbarch_long_double_format
9390 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9391 }
9392 else
9393 {
9394 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9395 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9396 }
9397
9398 if (have_vfp_pseudos)
9399 {
9400 /* NOTE: These are the only pseudo registers used by
9401 the ARM target at the moment. If more are added, a
9402 little more care in numbering will be needed. */
9403
9404 int num_pseudos = 32;
9405 if (have_neon_pseudos)
9406 num_pseudos += 16;
9407 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9408 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9409 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9410 }
9411
9412 if (tdesc_data)
9413 {
9414 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9415
9416 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9417
9418 /* Override tdesc_register_type to adjust the types of VFP
9419 registers for NEON. */
9420 set_gdbarch_register_type (gdbarch, arm_register_type);
9421 }
9422
  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
9426 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9427 user_reg_add (gdbarch, arm_register_aliases[i].name,
9428 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9429
9430 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9431 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9432
9433 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9434
9435 return gdbarch;
9436 }
9437
9438 static void
9439 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9440 {
9441 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9442
9443 if (tdep == NULL)
9444 return;
9445
9446 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9447 (unsigned long) tdep->lowest_pc);
9448 }
9449
#if GDB_SELF_TEST
namespace selftests
{
/* Forward declaration: the test body is defined later in this file and
   registered from _initialize_arm_tdep.  */
static void arm_record_test (void);
}
#endif
9456
9457 void
9458 _initialize_arm_tdep (void)
9459 {
9460 long length;
9461 int i, j;
9462 char regdesc[1024], *rdptr = regdesc;
9463 size_t rest = sizeof (regdesc);
9464
9465 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9466
9467 /* Add ourselves to objfile event chain. */
9468 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9469 arm_exidx_data_key
9470 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9471
9472 /* Register an ELF OS ABI sniffer for ARM binaries. */
9473 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9474 bfd_target_elf_flavour,
9475 arm_elf_osabi_sniffer);
9476
9477 /* Initialize the standard target descriptions. */
9478 initialize_tdesc_arm_with_m ();
9479 initialize_tdesc_arm_with_m_fpa_layout ();
9480 initialize_tdesc_arm_with_m_vfp_d16 ();
9481 initialize_tdesc_arm_with_iwmmxt ();
9482 initialize_tdesc_arm_with_vfpv2 ();
9483 initialize_tdesc_arm_with_vfpv3 ();
9484 initialize_tdesc_arm_with_neon ();
9485
9486 /* Add root prefix command for all "set arm"/"show arm" commands. */
9487 add_prefix_cmd ("arm", no_class, set_arm_command,
9488 _("Various ARM-specific commands."),
9489 &setarmcmdlist, "set arm ", 0, &setlist);
9490
9491 add_prefix_cmd ("arm", no_class, show_arm_command,
9492 _("Various ARM-specific commands."),
9493 &showarmcmdlist, "show arm ", 0, &showlist);
9494
9495
9496 arm_disassembler_options = xstrdup ("reg-names-std");
9497 const disasm_options_t *disasm_options
9498 = &disassembler_options_arm ()->options;
9499 int num_disassembly_styles = 0;
9500 for (i = 0; disasm_options->name[i] != NULL; i++)
9501 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9502 num_disassembly_styles++;
9503
9504 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9505 valid_disassembly_styles = XNEWVEC (const char *,
9506 num_disassembly_styles + 1);
9507 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9508 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9509 {
9510 size_t offset = strlen ("reg-names-");
9511 const char *style = disasm_options->name[i];
9512 valid_disassembly_styles[j++] = &style[offset];
9513 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9514 disasm_options->description[i]);
9515 rdptr += length;
9516 rest -= length;
9517 }
9518 /* Mark the end of valid options. */
9519 valid_disassembly_styles[num_disassembly_styles] = NULL;
9520
9521 /* Create the help text. */
9522 std::string helptext = string_printf ("%s%s%s",
9523 _("The valid values are:\n"),
9524 regdesc,
9525 _("The default is \"std\"."));
9526
9527 add_setshow_enum_cmd("disassembler", no_class,
9528 valid_disassembly_styles, &disassembly_style,
9529 _("Set the disassembly style."),
9530 _("Show the disassembly style."),
9531 helptext.c_str (),
9532 set_disassembly_style_sfunc,
9533 show_disassembly_style_sfunc,
9534 &setarmcmdlist, &showarmcmdlist);
9535
9536 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9537 _("Set usage of ARM 32-bit mode."),
9538 _("Show usage of ARM 32-bit mode."),
9539 _("When off, a 26-bit PC will be used."),
9540 NULL,
9541 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9542 mode is %s. */
9543 &setarmcmdlist, &showarmcmdlist);
9544
9545 /* Add a command to allow the user to force the FPU model. */
9546 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9547 _("Set the floating point type."),
9548 _("Show the floating point type."),
9549 _("auto - Determine the FP typefrom the OS-ABI.\n\
9550 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9551 fpa - FPA co-processor (GCC compiled).\n\
9552 softvfp - Software FP with pure-endian doubles.\n\
9553 vfp - VFP co-processor."),
9554 set_fp_model_sfunc, show_fp_model,
9555 &setarmcmdlist, &showarmcmdlist);
9556
9557 /* Add a command to allow the user to force the ABI. */
9558 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9559 _("Set the ABI."),
9560 _("Show the ABI."),
9561 NULL, arm_set_abi, arm_show_abi,
9562 &setarmcmdlist, &showarmcmdlist);
9563
9564 /* Add two commands to allow the user to force the assumed
9565 execution mode. */
9566 add_setshow_enum_cmd ("fallback-mode", class_support,
9567 arm_mode_strings, &arm_fallback_mode_string,
9568 _("Set the mode assumed when symbols are unavailable."),
9569 _("Show the mode assumed when symbols are unavailable."),
9570 NULL, NULL, arm_show_fallback_mode,
9571 &setarmcmdlist, &showarmcmdlist);
9572 add_setshow_enum_cmd ("force-mode", class_support,
9573 arm_mode_strings, &arm_force_mode_string,
9574 _("Set the mode assumed even when symbols are available."),
9575 _("Show the mode assumed even when symbols are available."),
9576 NULL, NULL, arm_show_force_mode,
9577 &setarmcmdlist, &showarmcmdlist);
9578
9579 /* Debugging flag. */
9580 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9581 _("Set ARM debugging."),
9582 _("Show ARM debugging."),
9583 _("When on, arm-specific debugging is enabled."),
9584 NULL,
9585 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9586 &setdebuglist, &showdebuglist);
9587
9588 #if GDB_SELF_TEST
9589 selftests::register_test ("arm-record", selftests::arm_record_test);
9590 #endif
9591
9592 }
9593
/* ARM-reversible process record data structures.  */

/* Instruction widths, in bytes, for the three encodings the recorder
   decodes: 32-bit ARM, 16-bit Thumb and 32-bit Thumb-2.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20
9604
/* Allocate a fresh uint32_t array of LENGTH register numbers, copy them
   in from RECORD_BUF, and assign it to REGS.  Does nothing when LENGTH
   is zero.  NOTE: LENGTH is evaluated twice — pass only side-effect-free
   arguments.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Allocate a fresh array of LENGTH struct arm_mem_r records, copy them
   in from RECORD_BUF, and assign it to MEMS.  Does nothing when LENGTH
   is zero.  RECORD_BUF is a uint32_t array laid out as (len, addr)
   pairs; the memcpy relies on struct arm_mem_r containing exactly two
   uint32_t members in that order.  LENGTH is evaluated twice.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
          while (0)

/* True once decoding has produced at least one register or memory
   record for this insn (i.e. it is already recorded or need not be
   decoded further).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9633
/* ARM memory record: one contiguous block of target memory whose
   pre-instruction contents must be saved so the insn can be reversed.  */
struct arm_mem_r
{
  uint32_t len;    /* Length of the block, in bytes.  */
  uint32_t addr;   /* Start address of the block.  */
};
9640
/* ARM instruction record: holds the opcode of the current insn and the
   execution state before entry to decode_insn (); on return from
   decode_insn () it carries the lists of registers and memory blocks
   that the insn is about to modify.  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Raw insn bits; wide enough for Thumb too.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* Number of memory records.  */
  uint32_t reg_rec_count;       /* Number of register records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9660
9661
/* Checks ARM SBZ and SBO mandatory fields.

   Examine the LEN-bit field of INSN that starts at bit BIT_NUM
   (1-based).  When SBO ("should be one") is non-zero, every bit of the
   field must be 1; otherwise ("should be zero") every bit must be 0.
   Return 1 when the check passes (a zero LEN trivially passes), else 0.

   The previous loop-based implementation was broken: the SBZ case
   tested `ones & sbo' with SBO == 0, so it failed for any non-all-ones
   field, and the SBO case stopped shifting as soon as the remaining
   bits were zero, wrongly accepting fields with leading zero bits.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  if (!len)
    return 1;

  /* Mask covering the low LEN bits (all 32 when LEN >= 32).  */
  uint32_t mask = (len >= 32) ? 0xffffffffu : ((1u << len) - 1u);
  uint32_t field = (insn >> (bit_num - 1)) & mask;

  return sbo ? field == mask : field == 0;
}
9685
/* Overall result codes returned by the ARM record entry points.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavour of misc store arm_record_strx is recording.  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction set that the insn being recorded belongs to.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9704
9705
9706 static int
9707 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9708 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9709 {
9710
9711 struct regcache *reg_cache = arm_insn_r->regcache;
9712 ULONGEST u_regval[2]= {0};
9713
9714 uint32_t reg_src1 = 0, reg_src2 = 0;
9715 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9716
9717 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9718 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9719
9720 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9721 {
9722 /* 1) Handle misc store, immediate offset. */
9723 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9724 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9725 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9726 regcache_raw_read_unsigned (reg_cache, reg_src1,
9727 &u_regval[0]);
9728 if (ARM_PC_REGNUM == reg_src1)
9729 {
9730 /* If R15 was used as Rn, hence current PC+8. */
9731 u_regval[0] = u_regval[0] + 8;
9732 }
9733 offset_8 = (immed_high << 4) | immed_low;
9734 /* Calculate target store address. */
9735 if (14 == arm_insn_r->opcode)
9736 {
9737 tgt_mem_addr = u_regval[0] + offset_8;
9738 }
9739 else
9740 {
9741 tgt_mem_addr = u_regval[0] - offset_8;
9742 }
9743 if (ARM_RECORD_STRH == str_type)
9744 {
9745 record_buf_mem[0] = 2;
9746 record_buf_mem[1] = tgt_mem_addr;
9747 arm_insn_r->mem_rec_count = 1;
9748 }
9749 else if (ARM_RECORD_STRD == str_type)
9750 {
9751 record_buf_mem[0] = 4;
9752 record_buf_mem[1] = tgt_mem_addr;
9753 record_buf_mem[2] = 4;
9754 record_buf_mem[3] = tgt_mem_addr + 4;
9755 arm_insn_r->mem_rec_count = 2;
9756 }
9757 }
9758 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9759 {
9760 /* 2) Store, register offset. */
9761 /* Get Rm. */
9762 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9763 /* Get Rn. */
9764 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9765 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9766 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9767 if (15 == reg_src2)
9768 {
9769 /* If R15 was used as Rn, hence current PC+8. */
9770 u_regval[0] = u_regval[0] + 8;
9771 }
9772 /* Calculate target store address, Rn +/- Rm, register offset. */
9773 if (12 == arm_insn_r->opcode)
9774 {
9775 tgt_mem_addr = u_regval[0] + u_regval[1];
9776 }
9777 else
9778 {
9779 tgt_mem_addr = u_regval[1] - u_regval[0];
9780 }
9781 if (ARM_RECORD_STRH == str_type)
9782 {
9783 record_buf_mem[0] = 2;
9784 record_buf_mem[1] = tgt_mem_addr;
9785 arm_insn_r->mem_rec_count = 1;
9786 }
9787 else if (ARM_RECORD_STRD == str_type)
9788 {
9789 record_buf_mem[0] = 4;
9790 record_buf_mem[1] = tgt_mem_addr;
9791 record_buf_mem[2] = 4;
9792 record_buf_mem[3] = tgt_mem_addr + 4;
9793 arm_insn_r->mem_rec_count = 2;
9794 }
9795 }
9796 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9797 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9798 {
9799 /* 3) Store, immediate pre-indexed. */
9800 /* 5) Store, immediate post-indexed. */
9801 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9802 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9803 offset_8 = (immed_high << 4) | immed_low;
9804 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9805 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9806 /* Calculate target store address, Rn +/- Rm, register offset. */
9807 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9808 {
9809 tgt_mem_addr = u_regval[0] + offset_8;
9810 }
9811 else
9812 {
9813 tgt_mem_addr = u_regval[0] - offset_8;
9814 }
9815 if (ARM_RECORD_STRH == str_type)
9816 {
9817 record_buf_mem[0] = 2;
9818 record_buf_mem[1] = tgt_mem_addr;
9819 arm_insn_r->mem_rec_count = 1;
9820 }
9821 else if (ARM_RECORD_STRD == str_type)
9822 {
9823 record_buf_mem[0] = 4;
9824 record_buf_mem[1] = tgt_mem_addr;
9825 record_buf_mem[2] = 4;
9826 record_buf_mem[3] = tgt_mem_addr + 4;
9827 arm_insn_r->mem_rec_count = 2;
9828 }
9829 /* Record Rn also as it changes. */
9830 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9831 arm_insn_r->reg_rec_count = 1;
9832 }
9833 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9834 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9835 {
9836 /* 4) Store, register pre-indexed. */
9837 /* 6) Store, register post -indexed. */
9838 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9839 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9840 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9841 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9842 /* Calculate target store address, Rn +/- Rm, register offset. */
9843 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9844 {
9845 tgt_mem_addr = u_regval[0] + u_regval[1];
9846 }
9847 else
9848 {
9849 tgt_mem_addr = u_regval[1] - u_regval[0];
9850 }
9851 if (ARM_RECORD_STRH == str_type)
9852 {
9853 record_buf_mem[0] = 2;
9854 record_buf_mem[1] = tgt_mem_addr;
9855 arm_insn_r->mem_rec_count = 1;
9856 }
9857 else if (ARM_RECORD_STRD == str_type)
9858 {
9859 record_buf_mem[0] = 4;
9860 record_buf_mem[1] = tgt_mem_addr;
9861 record_buf_mem[2] = 4;
9862 record_buf_mem[3] = tgt_mem_addr + 4;
9863 arm_insn_r->mem_rec_count = 2;
9864 }
9865 /* Record Rn also as it changes. */
9866 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9867 arm_insn_r->reg_rec_count = 1;
9868 }
9869 return 0;
9870 }
9871
/* Handling ARM extension space insns.  Records the registers (and, for
   SWP/SWPB and misc stores, the memory) modified by insns that live in
   the ARM "extension spaces": unconditional, arithmetic, control,
   load/store and coprocessor.  Returns 0 on success, -1 when the insn
   cannot be recorded yet.  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  int ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  gdb_assert (!INSN_RECORDED(arm_insn_r));
  /* Handle unconditional insn extension space.  */

  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no effect on architectural state, it just affects
	 the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
	{
	  /* BLX(1): clobbers the flags and the link register.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }


  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
	 versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S): Rd plus the flags.  */
      if (in_inclusive_range (insn_op1, 0U, 3U))
	{
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (in_inclusive_range (insn_op1, 4U, 15U))
	{
	  /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S):
	     RdHi, RdLo and the flags.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */

  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn,25))
	{
	  if (!bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if ((0 == insn_op1) || (2 == insn_op1))
		{
		  /* MRS: writes Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (1 == insn_op1)
		{
		  /* CPSR is going to be changed.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* SPSR is going to be changed.  */
		  /* We need to get the SPSR value, which is yet to be done.  */
		  return -1;
		}
	    }
	  else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if (1 == insn_op1)
		{
		  /* BX: only the flags need recording (PC is handled
		     by the record framework).  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* CLZ: writes Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	  else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BLX: clobbers the flags and the link register.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* QADD, QSUB, QDADD, QDSUB: flags (Q bit) and Rd.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BKPT.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;

	      /* The SPSR should be saved too; how to do that is still
		 an open question.  */
	      return -1;
	    }
	  else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 10 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 12 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 14 == bits (arm_insn_r->arm_insn, 4, 7)
		 )
	    {
	      if (0 == insn_op1 || 1 == insn_op1)
		{
		  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
		  /* We don't do the optimization for SMULW<y>, which
		     would need only Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (2 == insn_op1)
		{
		  /* SMLAL<x><y>: RdLo and RdHi.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (3 == insn_op1)
		{
		  /* SMUL<x><y>: writes only Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	}
      else
	{
	  /* MSR : immediate form.  */
	  if (1 == insn_op1)
	    {
	      /* CPSR is going to be changed.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	  else if (3 == insn_op1)
	    {
	      /* SPSR is going to be changed.  */
	      /* We need to get the SPSR value, which is yet to be done.  */
	      return -1;
	    }
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */

  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* SWP/SWPB.  */
      if (0 == insn_op1)
	{
	  /* SWP or SWPB: changes a register and memory.  */
	  /* Get memory address given by Rn.  */
	  reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
	  /* SWP swaps a word, SWPB a single byte.
	     NOTE(review): arm_insn_r->opcode is not assigned in this
	     function, so this test relies on a value set by an earlier
	     decode step — confirm against the callers.  */
	  if (8 == arm_insn_r->opcode)
	    {
	      record_buf_mem[0] = 4;
	    }
	  else
	    {
	      /* SWPB insn, swaps only byte.  */
	      record_buf_mem[0] = 1;
	    }
	  record_buf_mem[1] = u_regval;
	  arm_insn_r->mem_rec_count = 1;
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRH.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRH);
	}
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* LDRD: loads an even/odd register pair Rd, Rd+1.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = record_buf[0] + 1;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRD.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRD);
	}
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
	{
	  /* LDRH, LDRSB, LDRSH: write only Rd.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}

    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    return ret;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
10134
/* Handle ARM mode instructions with opcode 000: data-processing
   (register and register-shifted register) forms, the miscellaneous
   instruction space, multiplies, synchronization primitives and the
   extra load/store group.  Record every register and memory range the
   instruction is about to modify.  Return 0 on success, -1 when the
   instruction cannot be recorded.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  /* (opcode1 & 0x19) == 0x10 selects the "miscellaneous" space
     (TST/TEQ/CMP/CMN encodings with S == 0); everything else here is
     plain data-processing.  */
  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and data-processing (register-shifted
         register).  */
      /* In every shifter-operand mode the insn modifies the destination
         register, which is given by bits 12-15, plus the flags.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions.  */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
          && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
        {
          /* Handle BLX, branch and link/exchange.  */
          if (9 == arm_insn_r->opcode)
            {
              /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm,
                 and R14 stores the return address.  */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = ARM_LR_REGNUM;
              arm_insn_r->reg_rec_count = 2;
            }
        }
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
        {
          /* Handle enhanced software breakpoint insn, BKPT.  */
          /* CPSR is changed to be executed in ARM state, disabling normal
             interrupts, entering abort mode.  */
          /* According to high vector configuration PC is set.  */
          /* If the user hits the breakpoint and then reverses, we need
             to go back with the previous CPSR and Program Counter.  */
          record_buf[0] = ARM_PS_REGNUM;
          record_buf[1] = ARM_LR_REGNUM;
          arm_insn_r->reg_rec_count = 2;

          /* Save SPSR also; how?  Until that is solved BKPT cannot be
             recorded.  */
          return -1;
        }
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
               && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
        {
          /* Handle BX, branch and link/exchange.  */
          /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm.  */
          record_buf[0] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
               && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
               && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
        {
          /* Count leading zeros: CLZ.  Only Rd (bits 12-15) changes.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
               && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
               && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
               && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
        {
          /* Handle MRS insn: only the destination register changes.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate.  */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
        {
          /* Handle MLA and MUL: Rd is in bits 16-19 for these.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
        {
          /* Handle SMLAL, SMULL, UMLAL, UMULL: the 64-bit result is
             written to RdHi (bits 16-19) and RdLo (bits 12-15).  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[2] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 3;
        }
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives.  */

      /* Handling SWP, SWPB.  */
      /* These insns change a register and memory as well.  */

      /* The memory address swapped is given by Rn (bits 16-19).  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP swaps a whole word...  */
      if (8 == arm_insn_r->opcode)
        {
          record_buf_mem[0] = 4;
        }
      else
        {
          /* ...while SWPB swaps only a byte.  */
          record_buf_mem[0] = 1;
        }
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
           || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
        {
          /* Extra load/store (unprivileged): not supported yet.  */
          return -1;
        }
      else
        {
          /* Extra load/store.  Bits 5-6 select the transfer kind.  */
          switch (bits (arm_insn_r->arm_insn, 5, 6))
            {
            case 1:
              if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
                {
                  /* STRH (register), STRH (immediate).  */
                  arm_record_strx (arm_insn_r, &record_buf[0],
                                   &record_buf_mem[0], ARM_RECORD_STRH);
                }
              else if ((opcode1 & 0x05) == 0x1)
                {
                  /* LDRH (register).  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;

                  if (bit (arm_insn_r->arm_insn, 21))
                    {
                      /* Write back to Rn.  */
                      record_buf[arm_insn_r->reg_rec_count++]
                        = bits (arm_insn_r->arm_insn, 16, 19);
                    }
                }
              else if ((opcode1 & 0x05) == 0x5)
                {
                  /* LDRH (immediate), LDRH (literal).  */
                  int rn = bits (arm_insn_r->arm_insn, 16, 19);

                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;

                  if (rn != 15)
                    {
                      /* LDRH (immediate); the literal form (Rn == 15)
                         never writes back.  */
                      if (bit (arm_insn_r->arm_insn, 21))
                        {
                          /* Write back to Rn.  */
                          record_buf[arm_insn_r->reg_rec_count++] = rn;
                        }
                    }
                }
              else
                return -1;
              break;
            case 2:
              if ((opcode1 & 0x05) == 0x0)
                {
                  /* LDRD (register): loads an even/odd register pair.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  record_buf[1] = record_buf[0] + 1;
                  arm_insn_r->reg_rec_count = 2;

                  if (bit (arm_insn_r->arm_insn, 21))
                    {
                      /* Write back to Rn.  */
                      record_buf[arm_insn_r->reg_rec_count++]
                        = bits (arm_insn_r->arm_insn, 16, 19);
                    }
                }
              else if ((opcode1 & 0x05) == 0x1)
                {
                  /* LDRSB (register).  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;

                  if (bit (arm_insn_r->arm_insn, 21))
                    {
                      /* Write back to Rn.  */
                      record_buf[arm_insn_r->reg_rec_count++]
                        = bits (arm_insn_r->arm_insn, 16, 19);
                    }
                }
              else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
                {
                  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
                     LDRSB (literal).  */
                  int rn = bits (arm_insn_r->arm_insn, 16, 19);

                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;

                  if (rn != 15)
                    {
                      /* LDRD (immediate), LDRSB (immediate); the literal
                         forms (Rn == 15) never write back.  */
                      if (bit (arm_insn_r->arm_insn, 21))
                        {
                          /* Write back to Rn.  */
                          record_buf[arm_insn_r->reg_rec_count++] = rn;
                        }
                    }
                }
              else
                return -1;
              break;
            case 3:
              if ((opcode1 & 0x05) == 0x0)
                {
                  /* STRD (register).  */
                  arm_record_strx (arm_insn_r, &record_buf[0],
                                   &record_buf_mem[0], ARM_RECORD_STRD);
                }
              else if ((opcode1 & 0x05) == 0x1)
                {
                  /* LDRSH (register).  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;

                  if (bit (arm_insn_r->arm_insn, 21))
                    {
                      /* Write back to Rn.  */
                      record_buf[arm_insn_r->reg_rec_count++]
                        = bits (arm_insn_r->arm_insn, 16, 19);
                    }
                }
              else if ((opcode1 & 0x05) == 0x4)
                {
                  /* STRD (immediate).  */
                  arm_record_strx (arm_insn_r, &record_buf[0],
                                   &record_buf_mem[0], ARM_RECORD_STRD);
                }
              else if ((opcode1 & 0x05) == 0x5)
                {
                  /* LDRSH (immediate), LDRSH (literal).  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;

                  if (bit (arm_insn_r->arm_insn, 21))
                    {
                      /* Write back to Rn.  */
                      record_buf[arm_insn_r->reg_rec_count++]
                        = bits (arm_insn_r->arm_insn, 16, 19);
                    }
                }
              else
                return -1;
              break;
            default:
              return -1;
            }
        }
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10427
10428 /* Handling opcode 001 insns. */
10429
10430 static int
10431 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10432 {
10433 uint32_t record_buf[8], record_buf_mem[8];
10434
10435 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10436 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10437
10438 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10439 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10440 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10441 )
10442 {
10443 /* Handle MSR insn. */
10444 if (9 == arm_insn_r->opcode)
10445 {
10446 /* CSPR is going to be changed. */
10447 record_buf[0] = ARM_PS_REGNUM;
10448 arm_insn_r->reg_rec_count = 1;
10449 }
10450 else
10451 {
10452 /* SPSR is going to be changed. */
10453 }
10454 }
10455 else if (arm_insn_r->opcode <= 15)
10456 {
10457 /* Normal data processing insns. */
10458 /* Out of 11 shifter operands mode, all the insn modifies destination
10459 register, which is specified by 13-16 decode. */
10460 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10461 record_buf[1] = ARM_PS_REGNUM;
10462 arm_insn_r->reg_rec_count = 2;
10463 }
10464 else
10465 {
10466 return -1;
10467 }
10468
10469 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10470 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10471 return 0;
10472 }
10473
10474 static int
10475 arm_record_media (insn_decode_record *arm_insn_r)
10476 {
10477 uint32_t record_buf[8];
10478
10479 switch (bits (arm_insn_r->arm_insn, 22, 24))
10480 {
10481 case 0:
10482 /* Parallel addition and subtraction, signed */
10483 case 1:
10484 /* Parallel addition and subtraction, unsigned */
10485 case 2:
10486 case 3:
10487 /* Packing, unpacking, saturation and reversal */
10488 {
10489 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10490
10491 record_buf[arm_insn_r->reg_rec_count++] = rd;
10492 }
10493 break;
10494
10495 case 4:
10496 case 5:
10497 /* Signed multiplies */
10498 {
10499 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10500 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10501
10502 record_buf[arm_insn_r->reg_rec_count++] = rd;
10503 if (op1 == 0x0)
10504 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10505 else if (op1 == 0x4)
10506 record_buf[arm_insn_r->reg_rec_count++]
10507 = bits (arm_insn_r->arm_insn, 12, 15);
10508 }
10509 break;
10510
10511 case 6:
10512 {
10513 if (bit (arm_insn_r->arm_insn, 21)
10514 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10515 {
10516 /* SBFX */
10517 record_buf[arm_insn_r->reg_rec_count++]
10518 = bits (arm_insn_r->arm_insn, 12, 15);
10519 }
10520 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10521 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10522 {
10523 /* USAD8 and USADA8 */
10524 record_buf[arm_insn_r->reg_rec_count++]
10525 = bits (arm_insn_r->arm_insn, 16, 19);
10526 }
10527 }
10528 break;
10529
10530 case 7:
10531 {
10532 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10533 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10534 {
10535 /* Permanently UNDEFINED */
10536 return -1;
10537 }
10538 else
10539 {
10540 /* BFC, BFI and UBFX */
10541 record_buf[arm_insn_r->reg_rec_count++]
10542 = bits (arm_insn_r->arm_insn, 12, 15);
10543 }
10544 }
10545 break;
10546
10547 default:
10548 return -1;
10549 }
10550
10551 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10552
10553 return 0;
10554 }
10555
10556 /* Handle ARM mode instructions with opcode 010. */
10557
10558 static int
10559 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10560 {
10561 struct regcache *reg_cache = arm_insn_r->regcache;
10562
10563 uint32_t reg_base , reg_dest;
10564 uint32_t offset_12, tgt_mem_addr;
10565 uint32_t record_buf[8], record_buf_mem[8];
10566 unsigned char wback;
10567 ULONGEST u_regval;
10568
10569 /* Calculate wback. */
10570 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10571 || (bit (arm_insn_r->arm_insn, 21) == 1);
10572
10573 arm_insn_r->reg_rec_count = 0;
10574 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10575
10576 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10577 {
10578 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10579 and LDRT. */
10580
10581 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10582 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10583
10584 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10585 preceeds a LDR instruction having R15 as reg_base, it
10586 emulates a branch and link instruction, and hence we need to save
10587 CPSR and PC as well. */
10588 if (ARM_PC_REGNUM == reg_dest)
10589 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10590
10591 /* If wback is true, also save the base register, which is going to be
10592 written to. */
10593 if (wback)
10594 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10595 }
10596 else
10597 {
10598 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10599
10600 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10601 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10602
10603 /* Handle bit U. */
10604 if (bit (arm_insn_r->arm_insn, 23))
10605 {
10606 /* U == 1: Add the offset. */
10607 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10608 }
10609 else
10610 {
10611 /* U == 0: subtract the offset. */
10612 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10613 }
10614
10615 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10616 bytes. */
10617 if (bit (arm_insn_r->arm_insn, 22))
10618 {
10619 /* STRB and STRBT: 1 byte. */
10620 record_buf_mem[0] = 1;
10621 }
10622 else
10623 {
10624 /* STR and STRT: 4 bytes. */
10625 record_buf_mem[0] = 4;
10626 }
10627
10628 /* Handle bit P. */
10629 if (bit (arm_insn_r->arm_insn, 24))
10630 record_buf_mem[1] = tgt_mem_addr;
10631 else
10632 record_buf_mem[1] = (uint32_t) u_regval;
10633
10634 arm_insn_r->mem_rec_count = 1;
10635
10636 /* If wback is true, also save the base register, which is going to be
10637 written to. */
10638 if (wback)
10639 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10640 }
10641
10642 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10643 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10644 return 0;
10645 }
10646
10647 /* Handling opcode 011 insns. */
10648
10649 static int
10650 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10651 {
10652 struct regcache *reg_cache = arm_insn_r->regcache;
10653
10654 uint32_t shift_imm = 0;
10655 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10656 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10657 uint32_t record_buf[8], record_buf_mem[8];
10658
10659 LONGEST s_word;
10660 ULONGEST u_regval[2];
10661
10662 if (bit (arm_insn_r->arm_insn, 4))
10663 return arm_record_media (arm_insn_r);
10664
10665 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10666 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10667
10668 /* Handle enhanced store insns and LDRD DSP insn,
10669 order begins according to addressing modes for store insns
10670 STRH insn. */
10671
10672 /* LDR or STR? */
10673 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10674 {
10675 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10676 /* LDR insn has a capability to do branching, if
10677 MOV LR, PC is precedded by LDR insn having Rn as R15
10678 in that case, it emulates branch and link insn, and hence we
10679 need to save CSPR and PC as well. */
10680 if (15 != reg_dest)
10681 {
10682 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10683 arm_insn_r->reg_rec_count = 1;
10684 }
10685 else
10686 {
10687 record_buf[0] = reg_dest;
10688 record_buf[1] = ARM_PS_REGNUM;
10689 arm_insn_r->reg_rec_count = 2;
10690 }
10691 }
10692 else
10693 {
10694 if (! bits (arm_insn_r->arm_insn, 4, 11))
10695 {
10696 /* Store insn, register offset and register pre-indexed,
10697 register post-indexed. */
10698 /* Get Rm. */
10699 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10700 /* Get Rn. */
10701 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10702 regcache_raw_read_unsigned (reg_cache, reg_src1
10703 , &u_regval[0]);
10704 regcache_raw_read_unsigned (reg_cache, reg_src2
10705 , &u_regval[1]);
10706 if (15 == reg_src2)
10707 {
10708 /* If R15 was used as Rn, hence current PC+8. */
10709 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10710 u_regval[0] = u_regval[0] + 8;
10711 }
10712 /* Calculate target store address, Rn +/- Rm, register offset. */
10713 /* U == 1. */
10714 if (bit (arm_insn_r->arm_insn, 23))
10715 {
10716 tgt_mem_addr = u_regval[0] + u_regval[1];
10717 }
10718 else
10719 {
10720 tgt_mem_addr = u_regval[1] - u_regval[0];
10721 }
10722
10723 switch (arm_insn_r->opcode)
10724 {
10725 /* STR. */
10726 case 8:
10727 case 12:
10728 /* STR. */
10729 case 9:
10730 case 13:
10731 /* STRT. */
10732 case 1:
10733 case 5:
10734 /* STR. */
10735 case 0:
10736 case 4:
10737 record_buf_mem[0] = 4;
10738 break;
10739
10740 /* STRB. */
10741 case 10:
10742 case 14:
10743 /* STRB. */
10744 case 11:
10745 case 15:
10746 /* STRBT. */
10747 case 3:
10748 case 7:
10749 /* STRB. */
10750 case 2:
10751 case 6:
10752 record_buf_mem[0] = 1;
10753 break;
10754
10755 default:
10756 gdb_assert_not_reached ("no decoding pattern found");
10757 break;
10758 }
10759 record_buf_mem[1] = tgt_mem_addr;
10760 arm_insn_r->mem_rec_count = 1;
10761
10762 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10763 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10764 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10765 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10766 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10767 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10768 )
10769 {
10770 /* Rn is going to be changed in pre-indexed mode and
10771 post-indexed mode as well. */
10772 record_buf[0] = reg_src2;
10773 arm_insn_r->reg_rec_count = 1;
10774 }
10775 }
10776 else
10777 {
10778 /* Store insn, scaled register offset; scaled pre-indexed. */
10779 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10780 /* Get Rm. */
10781 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10782 /* Get Rn. */
10783 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10784 /* Get shift_imm. */
10785 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10786 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10787 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10788 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10789 /* Offset_12 used as shift. */
10790 switch (offset_12)
10791 {
10792 case 0:
10793 /* Offset_12 used as index. */
10794 offset_12 = u_regval[0] << shift_imm;
10795 break;
10796
10797 case 1:
10798 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10799 break;
10800
10801 case 2:
10802 if (!shift_imm)
10803 {
10804 if (bit (u_regval[0], 31))
10805 {
10806 offset_12 = 0xFFFFFFFF;
10807 }
10808 else
10809 {
10810 offset_12 = 0;
10811 }
10812 }
10813 else
10814 {
10815 /* This is arithmetic shift. */
10816 offset_12 = s_word >> shift_imm;
10817 }
10818 break;
10819
10820 case 3:
10821 if (!shift_imm)
10822 {
10823 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10824 &u_regval[1]);
10825 /* Get C flag value and shift it by 31. */
10826 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10827 | (u_regval[0]) >> 1);
10828 }
10829 else
10830 {
10831 offset_12 = (u_regval[0] >> shift_imm) \
10832 | (u_regval[0] <<
10833 (sizeof(uint32_t) - shift_imm));
10834 }
10835 break;
10836
10837 default:
10838 gdb_assert_not_reached ("no decoding pattern found");
10839 break;
10840 }
10841
10842 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10843 /* bit U set. */
10844 if (bit (arm_insn_r->arm_insn, 23))
10845 {
10846 tgt_mem_addr = u_regval[1] + offset_12;
10847 }
10848 else
10849 {
10850 tgt_mem_addr = u_regval[1] - offset_12;
10851 }
10852
10853 switch (arm_insn_r->opcode)
10854 {
10855 /* STR. */
10856 case 8:
10857 case 12:
10858 /* STR. */
10859 case 9:
10860 case 13:
10861 /* STRT. */
10862 case 1:
10863 case 5:
10864 /* STR. */
10865 case 0:
10866 case 4:
10867 record_buf_mem[0] = 4;
10868 break;
10869
10870 /* STRB. */
10871 case 10:
10872 case 14:
10873 /* STRB. */
10874 case 11:
10875 case 15:
10876 /* STRBT. */
10877 case 3:
10878 case 7:
10879 /* STRB. */
10880 case 2:
10881 case 6:
10882 record_buf_mem[0] = 1;
10883 break;
10884
10885 default:
10886 gdb_assert_not_reached ("no decoding pattern found");
10887 break;
10888 }
10889 record_buf_mem[1] = tgt_mem_addr;
10890 arm_insn_r->mem_rec_count = 1;
10891
10892 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10893 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10894 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10895 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10896 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10897 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10898 )
10899 {
10900 /* Rn is going to be changed in register scaled pre-indexed
10901 mode,and scaled post indexed mode. */
10902 record_buf[0] = reg_src2;
10903 arm_insn_r->reg_rec_count = 1;
10904 }
10905 }
10906 }
10907
10908 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10909 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10910 return 0;
10911 }
10912
10913 /* Handle ARM mode instructions with opcode 100. */
10914
10915 static int
10916 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10917 {
10918 struct regcache *reg_cache = arm_insn_r->regcache;
10919 uint32_t register_count = 0, register_bits;
10920 uint32_t reg_base, addr_mode;
10921 uint32_t record_buf[24], record_buf_mem[48];
10922 uint32_t wback;
10923 ULONGEST u_regval;
10924
10925 /* Fetch the list of registers. */
10926 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10927 arm_insn_r->reg_rec_count = 0;
10928
10929 /* Fetch the base register that contains the address we are loading data
10930 to. */
10931 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10932
10933 /* Calculate wback. */
10934 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10935
10936 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10937 {
10938 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10939
10940 /* Find out which registers are going to be loaded from memory. */
10941 while (register_bits)
10942 {
10943 if (register_bits & 0x00000001)
10944 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10945 register_bits = register_bits >> 1;
10946 register_count++;
10947 }
10948
10949
10950 /* If wback is true, also save the base register, which is going to be
10951 written to. */
10952 if (wback)
10953 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10954
10955 /* Save the CPSR register. */
10956 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10957 }
10958 else
10959 {
10960 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10961
10962 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10963
10964 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10965
10966 /* Find out how many registers are going to be stored to memory. */
10967 while (register_bits)
10968 {
10969 if (register_bits & 0x00000001)
10970 register_count++;
10971 register_bits = register_bits >> 1;
10972 }
10973
10974 switch (addr_mode)
10975 {
10976 /* STMDA (STMED): Decrement after. */
10977 case 0:
10978 record_buf_mem[1] = (uint32_t) u_regval
10979 - register_count * INT_REGISTER_SIZE + 4;
10980 break;
10981 /* STM (STMIA, STMEA): Increment after. */
10982 case 1:
10983 record_buf_mem[1] = (uint32_t) u_regval;
10984 break;
10985 /* STMDB (STMFD): Decrement before. */
10986 case 2:
10987 record_buf_mem[1] = (uint32_t) u_regval
10988 - register_count * INT_REGISTER_SIZE;
10989 break;
10990 /* STMIB (STMFA): Increment before. */
10991 case 3:
10992 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10993 break;
10994 default:
10995 gdb_assert_not_reached ("no decoding pattern found");
10996 break;
10997 }
10998
10999 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11000 arm_insn_r->mem_rec_count = 1;
11001
11002 /* If wback is true, also save the base register, which is going to be
11003 written to. */
11004 if (wback)
11005 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11006 }
11007
11008 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11009 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11010 return 0;
11011 }
11012
11013 /* Handling opcode 101 insns. */
11014
11015 static int
11016 arm_record_b_bl (insn_decode_record *arm_insn_r)
11017 {
11018 uint32_t record_buf[8];
11019
11020 /* Handle B, BL, BLX(1) insns. */
11021 /* B simply branches so we do nothing here. */
11022 /* Note: BLX(1) doesnt fall here but instead it falls into
11023 extension space. */
11024 if (bit (arm_insn_r->arm_insn, 24))
11025 {
11026 record_buf[0] = ARM_LR_REGNUM;
11027 arm_insn_r->reg_rec_count = 1;
11028 }
11029
11030 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11031
11032 return 0;
11033 }
11034
11035 static int
11036 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11037 {
11038 printf_unfiltered (_("Process record does not support instruction "
11039 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11040 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11041
11042 return -1;
11043 }
11044
11045 /* Record handler for vector data transfer instructions. */
11046
11047 static int
11048 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11049 {
11050 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11051 uint32_t record_buf[4];
11052
11053 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11054 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11055 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11056 bit_l = bit (arm_insn_r->arm_insn, 20);
11057 bit_c = bit (arm_insn_r->arm_insn, 8);
11058
11059 /* Handle VMOV instruction. */
11060 if (bit_l && bit_c)
11061 {
11062 record_buf[0] = reg_t;
11063 arm_insn_r->reg_rec_count = 1;
11064 }
11065 else if (bit_l && !bit_c)
11066 {
11067 /* Handle VMOV instruction. */
11068 if (bits_a == 0x00)
11069 {
11070 record_buf[0] = reg_t;
11071 arm_insn_r->reg_rec_count = 1;
11072 }
11073 /* Handle VMRS instruction. */
11074 else if (bits_a == 0x07)
11075 {
11076 if (reg_t == 15)
11077 reg_t = ARM_PS_REGNUM;
11078
11079 record_buf[0] = reg_t;
11080 arm_insn_r->reg_rec_count = 1;
11081 }
11082 }
11083 else if (!bit_l && !bit_c)
11084 {
11085 /* Handle VMOV instruction. */
11086 if (bits_a == 0x00)
11087 {
11088 record_buf[0] = ARM_D0_REGNUM + reg_v;
11089
11090 arm_insn_r->reg_rec_count = 1;
11091 }
11092 /* Handle VMSR instruction. */
11093 else if (bits_a == 0x07)
11094 {
11095 record_buf[0] = ARM_FPSCR_REGNUM;
11096 arm_insn_r->reg_rec_count = 1;
11097 }
11098 }
11099 else if (!bit_l && bit_c)
11100 {
11101 /* Handle VMOV instruction. */
11102 if (!(bits_a & 0x04))
11103 {
11104 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11105 + ARM_D0_REGNUM;
11106 arm_insn_r->reg_rec_count = 1;
11107 }
11108 /* Handle VDUP instruction. */
11109 else
11110 {
11111 if (bit (arm_insn_r->arm_insn, 21))
11112 {
11113 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11114 record_buf[0] = reg_v + ARM_D0_REGNUM;
11115 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11116 arm_insn_r->reg_rec_count = 2;
11117 }
11118 else
11119 {
11120 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11121 record_buf[0] = reg_v + ARM_D0_REGNUM;
11122 arm_insn_r->reg_rec_count = 1;
11123 }
11124 }
11125 }
11126
11127 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11128 return 0;
11129 }
11130
11131 /* Record handler for extension register load/store instructions. */
11132
static int
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  /* Bits 20-24 select the operation; bit 8 distinguishes double
     precision (set) from single precision (clear).  */
  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    {
      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
	{
	  /* Two ARM core registers (Rt in bits 12-15, Rt2 in bits
	     16-19) receive the data; record both.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
	  arm_insn_r->reg_rec_count = 2;
	}
      else
	{
	  /* Data moves from ARM core registers into extension
	     registers; record the destination D register(s).  */
	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

	  if (single_reg)
	    {
	      /* The first S register number m is REG_M:M (M is bit 5),
		 the corresponding D register number is REG_M:M / 2, which
		 is REG_M.  */
	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
	      /* The second S register number is REG_M:M + 1, the
		 corresponding D register number is (REG_M:M + 1) / 2.
		 IOW, if bit M is 1, the first and second S registers
		 are mapped to different D registers, otherwise, they are
		 in the same D register.  */
	      if (bit_m)
		{
		  record_buf[arm_insn_r->reg_rec_count++]
		    = ARM_D0_REGNUM + reg_m + 1;
		}
	    }
	  else
	    {
	      /* Double precision: the destination D register number
		 is M:REG_M.  */
	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }
  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
	   || op_vldm_vstm == 0x12)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
      uint32_t memory_index = 0;

      /* Base register Rn and the imm8 transfer length (in words).  */
      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;
      memory_count = imm_off8;

      /* Bit 23 (U): increment-after starts at the base address;
	 otherwise decrement-before starts IMM_OFF32 below it.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval;
      else
	start_address = u_regval - imm_off32;

      /* Bit 21 (W): write-back also updates the base register.  */
      if (bit (arm_insn_r->arm_insn, 21))
	{
	  record_buf[0] = reg_rn;
	  arm_insn_r->reg_rec_count = 1;
	}

      /* Record each (size, address) pair overwritten by the store.
	 NOTE(review): MEMORY_COUNT is the raw imm8, which for the
	 double-precision form counts 32-bit halves; the else branch
	 below then records 8 bytes per iteration, so the recorded
	 range is twice the actual transfer — harmless for replay
	 (over-recording), but confirm against the VSTM encoding.  */
      while (memory_count > 0)
	{
	  if (single_reg)
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      start_address = start_address + 4;
	      memory_index = memory_index + 2;
	    }
	  else
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      record_buf_mem[memory_index + 2] = 4;
	      record_buf_mem[memory_index + 3] = start_address + 4;
	      start_address = start_address + 8;
	      memory_index = memory_index + 4;
	    }
	  memory_count--;
	}
      arm_insn_r->mem_rec_count = (memory_index >> 1);
    }
  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
	   || op_vldm_vstm == 0x13)
    {
      uint32_t reg_count, reg_vd;
      uint32_t reg_index = 0;
      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

      reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
      reg_count = bits (arm_insn_r->arm_insn, 0, 7);

      /* REG_VD is the first D register number.  If the instruction
	 loads memory to S registers (SINGLE_REG is TRUE), the register
	 number is (REG_VD << 1 | bit D), so the corresponding D
	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
      if (!single_reg)
	reg_vd = reg_vd | (bit_d << 4);

      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

      /* If the instruction loads memory to D register, REG_COUNT should
	 be divided by 2, according to the ARM Architecture Reference
	 Manual.  If the instruction loads memory to S register, divide by
	 2 as well because two S registers are mapped to D register.  */
      reg_count = reg_count / 2;
      if (single_reg && bit_d)
	{
	  /* Increase the register count if S register list starts from
	     an odd number (bit d is one).  */
	  reg_count++;
	}

      /* Record the destination D registers from last to first.  */
      while (reg_count > 0)
	{
	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
	  reg_count--;
	}
      arm_insn_r->reg_rec_count = reg_index;
    }
  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;

      /* Bit 23 (U): add or subtract the immediate offset.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval + imm_off32;
      else
	start_address = u_regval - imm_off32;

      if (single_reg)
	{
	  /* 4 bytes are overwritten.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  arm_insn_r->mem_rec_count = 1;
	}
      else
	{
	  /* 8 bytes, recorded as two 4-byte entries.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  record_buf_mem[memory_index + 2] = 4;
	  record_buf_mem[memory_index + 3] = start_address + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    {
      uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      if (!single_reg)
	{
	  /* D register number is D:Vd.  */
	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
	}
      else
	{
	  /* S register number is Vd:D.  */
	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
	  /* Record register D rather than pseudo register S.  */
	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
	}
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11325
11326 /* Record handler for arm/thumb mode VFP data processing instructions. */
11327
static int
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  /* Classification of what the instruction overwrites:
     INSN_T0 - two consecutive D registers (destination is a Q reg);
     INSN_T1 - one double-precision D register;
     INSN_T2 - one single-precision S register (recorded via its
	       containing D register);
     INSN_T3 - only FPSCR changes (compares);
     INSN_INV - no pattern matched.  */
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
  if (opc1 == 0x00)
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    {
      if (!bit (arm_insn_r->arm_insn, 9))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    {
      /* Handle VMOV.  */
      if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
	{
	  if (bit (arm_insn_r->arm_insn, 4))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VNEG and VABS.  */
      else if ((opc2 == 0x01 && opc3 == 0x01)
	      || (opc2 == 0x00 && opc3 == 0x03))
	{
	  if (!bit (arm_insn_r->arm_insn, 11))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VSQRT.  */
      else if (opc2 == 0x01 && opc3 == 0x03)
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      /* Handle VCVT between double and single precision: the
	 destination size is the opposite of the source size bit, hence
	 the inverted DP_OP_SZ test relative to the cases above.  */
      else if (opc2 == 0x07 && opc3 == 0x03)
	{
	  if (!dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      else if (opc3 & 0x01)
	{
	  /* Handle VCVT.  */
	  if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
	    {
	      if (!bit (arm_insn_r->arm_insn, 18))
		curr_insn_type = INSN_T2;
	      else
		{
		  if (dp_op_sz)
		    curr_insn_type = INSN_T1;
		  else
		    curr_insn_type = INSN_T2;
		}
	    }
	  /* Handle VCVT.  */
	  else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	  /* Handle VCVTB, VCVTT.  */
	  else if ((opc2 & 0x0e) == 0x02)
	    curr_insn_type = INSN_T2;
	  /* Handle VCMP, VCMPE.  */
	  else if ((opc2 & 0x0e) == 0x04)
	    curr_insn_type = INSN_T3;
	}
    }

  switch (curr_insn_type)
    {
    case INSN_T0:
      /* Destination is a D register pair, number D:Vd.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;
      break;

    case INSN_T1:
      /* Destination D register number is D:Vd.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T2:
      /* Destination S register number is Vd:D.
	 NOTE(review): the S number is added directly to ARM_D0_REGNUM
	 here, while other recorders (e.g. VLDR) divide it by 2 to get
	 the containing D register — confirm this is the intended
	 mapping.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T3:
      /* Compare instructions only update the status flags.  */
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    default:
      gdb_assert_not_reached ("no decoding pattern found");
      break;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11537
11538 /* Handling opcode 110 insns. */
11539
11540 static int
11541 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11542 {
11543 uint32_t op1, op1_ebit, coproc;
11544
11545 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11546 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11547 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11548
11549 if ((coproc & 0x0e) == 0x0a)
11550 {
11551 /* Handle extension register ld/st instructions. */
11552 if (!(op1 & 0x20))
11553 return arm_record_exreg_ld_st_insn (arm_insn_r);
11554
11555 /* 64-bit transfers between arm core and extension registers. */
11556 if ((op1 & 0x3e) == 0x04)
11557 return arm_record_exreg_ld_st_insn (arm_insn_r);
11558 }
11559 else
11560 {
11561 /* Handle coprocessor ld/st instructions. */
11562 if (!(op1 & 0x3a))
11563 {
11564 /* Store. */
11565 if (!op1_ebit)
11566 return arm_record_unsupported_insn (arm_insn_r);
11567 else
11568 /* Load. */
11569 return arm_record_unsupported_insn (arm_insn_r);
11570 }
11571
11572 /* Move to coprocessor from two arm core registers. */
11573 if (op1 == 0x4)
11574 return arm_record_unsupported_insn (arm_insn_r);
11575
11576 /* Move to two arm core registers from coprocessor. */
11577 if (op1 == 0x5)
11578 {
11579 uint32_t reg_t[2];
11580
11581 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11582 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11583 arm_insn_r->reg_rec_count = 2;
11584
11585 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11586 return 0;
11587 }
11588 }
11589 return arm_record_unsupported_insn (arm_insn_r);
11590 }
11591
11592 /* Handling opcode 111 insns. */
11593
static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_ebit, coproc, bits_24_25;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);
  bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);

  /* Handle arm SWI/SVC system call instructions.  */
  if (bits_24_25 == 0x3)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  /* The 24-bit immediate is the syscall number in the old
	     OABI; in EABI it is zero and the number lives in r7.  */
	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  if (svc_operand)  /* OABI.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }
  else if (bits_24_25 == 0x02)
    {
      if (op)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 8, 16, and 32-bit transfer */
	      return arm_record_vdata_transfer_insn (arm_insn_r);
	    }
	  else
	    {
	      if (op1_ebit)
		{
		  /* MRC, MRC2: one ARM core register is written; Rt
		     of 15 means the APSR flags instead.  */
		  uint32_t record_buf[1];

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  if (record_buf[0] == 15)
		    record_buf[0] = ARM_PS_REGNUM;

		  arm_insn_r->reg_rec_count = 1;
		  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
			     record_buf);
		  return 0;
		}
	      else
		{
		  /* MCR, MCR2 */
		  return -1;
		}
	    }
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* VFP data-processing instructions.  */
	      return arm_record_vfp_data_proc_insn (arm_insn_r);
	    }
	  else
	    {
	      /* CDP, CDP2 */
	      return -1;
	    }
	}
    }
  else
    {
      unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);

      if (op1 == 5)
	{
	  if ((coproc & 0x0e) != 0x0a)
	    {
	      /* MRRC, MRRC2 */
	      return -1;
	    }
	}
      /* NOTE(review): OP1 == 5 is already consumed by the branch
	 above, so only OP1 == 4 can reach this test; the "op1 == 5"
	 here is dead — confirm the intended decode.  */
      else if (op1 == 4 || op1 == 5)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 64-bit transfers between ARM core and extension */
	      return -1;
	    }
	  else if (op1 == 4)
	    {
	      /* MCRR, MCRR2 */
	      return -1;
	    }
	}
      else if (op1 == 0 || op1 == 1)
	{
	  /* UNDEFINED */
	  return -1;
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* Extension register load/store */
	    }
	  else
	    {
	      /* STC, STC2, LDC, LDC2 */
	    }
	  return -1;
	}
    }

  return -1;
}
11721
11722 /* Handling opcode 000 insns. */
11723
11724 static int
11725 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11726 {
11727 uint32_t record_buf[8];
11728 uint32_t reg_src1 = 0;
11729
11730 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11731
11732 record_buf[0] = ARM_PS_REGNUM;
11733 record_buf[1] = reg_src1;
11734 thumb_insn_r->reg_rec_count = 2;
11735
11736 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11737
11738 return 0;
11739 }
11740
11741
11742 /* Handling opcode 001 insns. */
11743
11744 static int
11745 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11746 {
11747 uint32_t record_buf[8];
11748 uint32_t reg_src1 = 0;
11749
11750 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11751
11752 record_buf[0] = ARM_PS_REGNUM;
11753 record_buf[1] = reg_src1;
11754 thumb_insn_r->reg_rec_count = 2;
11755
11756 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11757
11758 return 0;
11759 }
11760
11761 /* Handling opcode 010 insns. */
11762
static int
thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;

  ULONGEST u_regval[2] = {0};

  opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);

  if (bit (thumb_insn_r->arm_insn, 12))
    {
      /* Handle load/store register offset.  */
      uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);

      if (in_inclusive_range (opB, 4U, 7U))
	{
	  /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH: the destination
	     register Rd (bits 0-2) is overwritten.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
	  record_buf[0] = reg_src1;
	  thumb_insn_r->reg_rec_count = 1;
	}
      else if (in_inclusive_range (opB, 0U, 2U))
	{
	  /* STR(2), STRB(2), STRH(2): memory at [Rn + Rm] is
	     overwritten; record the size and effective address.
	     NOTE(review): opB == 3 falls through with nothing
	     recorded — confirm that encoding cannot occur here.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
	  reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
	  regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
	  if (0 == opB)
	    record_buf_mem[0] = 4;    /* STR (2).  */
	  else if (2 == opB)
	    record_buf_mem[0] = 1;    /* STRB (2).  */
	  else if (1 == opB)
	    record_buf_mem[0] = 2;    /* STRH (2).  */
	  record_buf_mem[1] = u_regval[0] + u_regval[1];
	  thumb_insn_r->mem_rec_count = 1;
	}
    }
  else if (bit (thumb_insn_r->arm_insn, 11))
    {
      /* Handle load from literal pool.  */
      /* LDR(3): Rd lives in bits 8-10.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (opcode1)
    {
      /* Special data instructions and branch and exchange.  */
      opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
      opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
      if ((3 == opcode2) && (!opcode3))
	{
	  /* Branch with exchange: only the flags (Thumb bit) change
	     here; PC is saved by the caller.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Format 8; special data processing insns.  The destination
	     is a high or low register: H1 (bit 7) : Rd (bits 0-2).  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
			  | bits (thumb_insn_r->arm_insn, 0, 2));
	  thumb_insn_r->reg_rec_count = 2;
	}
    }
  else
    {
      /* Format 5; data processing insns.  Bit 7 selects the high
	 register bank for Rd.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
      if (bit (thumb_insn_r->arm_insn, 7))
	{
	  reg_src1 = reg_src1 + 8;
	}
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = reg_src1;
      thumb_insn_r->reg_rec_count = 2;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
11852
11853 /* Handling opcode 001 insns. */
11854
11855 static int
11856 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11857 {
11858 struct regcache *reg_cache = thumb_insn_r->regcache;
11859 uint32_t record_buf[8], record_buf_mem[8];
11860
11861 uint32_t reg_src1 = 0;
11862 uint32_t opcode = 0, immed_5 = 0;
11863
11864 ULONGEST u_regval = 0;
11865
11866 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11867
11868 if (opcode)
11869 {
11870 /* LDR(1). */
11871 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11872 record_buf[0] = reg_src1;
11873 thumb_insn_r->reg_rec_count = 1;
11874 }
11875 else
11876 {
11877 /* STR(1). */
11878 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11879 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11880 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11881 record_buf_mem[0] = 4;
11882 record_buf_mem[1] = u_regval + (immed_5 * 4);
11883 thumb_insn_r->mem_rec_count = 1;
11884 }
11885
11886 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11887 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11888 record_buf_mem);
11889
11890 return 0;
11891 }
11892
11893 /* Handling opcode 100 insns. */
11894
11895 static int
11896 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11897 {
11898 struct regcache *reg_cache = thumb_insn_r->regcache;
11899 uint32_t record_buf[8], record_buf_mem[8];
11900
11901 uint32_t reg_src1 = 0;
11902 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11903
11904 ULONGEST u_regval = 0;
11905
11906 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11907
11908 if (3 == opcode)
11909 {
11910 /* LDR(4). */
11911 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11912 record_buf[0] = reg_src1;
11913 thumb_insn_r->reg_rec_count = 1;
11914 }
11915 else if (1 == opcode)
11916 {
11917 /* LDRH(1). */
11918 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11919 record_buf[0] = reg_src1;
11920 thumb_insn_r->reg_rec_count = 1;
11921 }
11922 else if (2 == opcode)
11923 {
11924 /* STR(3). */
11925 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11926 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11927 record_buf_mem[0] = 4;
11928 record_buf_mem[1] = u_regval + (immed_8 * 4);
11929 thumb_insn_r->mem_rec_count = 1;
11930 }
11931 else if (0 == opcode)
11932 {
11933 /* STRH(1). */
11934 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11935 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11936 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11937 record_buf_mem[0] = 2;
11938 record_buf_mem[1] = u_regval + (immed_5 * 2);
11939 thumb_insn_r->mem_rec_count = 1;
11940 }
11941
11942 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11943 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11944 record_buf_mem);
11945
11946 return 0;
11947 }
11948
11949 /* Handling opcode 101 insns. */
11950
static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode == 0 || opcode == 1)
    {
      /* ADR and ADD (SP plus immediate): the destination register Rd
	 lives in bits 8-10.  */

      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* Miscellaneous 16-bit instructions, discriminated by
	 bits 8-11.  */
      uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);

      switch (opcode2)
	{
	case 6:
	  /* SETEND and CPS: no general register changes recorded.  */
	  break;
	case 0:
	  /* ADD/SUB (SP plus immediate): only SP changes.
	     NOTE(review): REG_SRC1 is computed but unused here —
	     presumably leftover; confirm intent.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 1: /* fall through  */
	case 3: /* fall through  */
	case 9: /* fall through  */
	case 11:
	  /* CBNZ, CBZ: branch only; PC is saved by the caller.  */
	  break;
	case 2:
	  /* SXTH, SXTB, UXTH, UXTB: Rd (bits 0-2) is overwritten.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 4: /* fall through  */
	case 5:
	  /* PUSH.  Count the registers in the list (bits 0-7), plus LR
	     if bit 8 is set; the stored block starts that many words
	     below SP.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;
	      register_bits = register_bits >> 1;
	    }
	  start_address = u_regval - \
	      (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
	  thumb_insn_r->mem_rec_count = register_count;
	  /* Fill (size, address) pairs from the end of the buffer
	     towards the front, ascending addresses.  */
	  while (register_count)
	    {
	      record_buf_mem[(register_count * 2) - 1] = start_address;
	      record_buf_mem[(register_count * 2) - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 10:
	  /* REV, REV16, REVSH: Rd (bits 0-2) is overwritten.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 12: /* fall through  */
	case 13:
	  /* POP.  Every register in the list (bits 0-7) is loaded;
	     SP is updated; flags may change on the PC-pop form.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;
	      register_bits = register_bits >> 1;
	      register_count++;
	    }
	  record_buf[index++] = ARM_PS_REGNUM;
	  record_buf[index++] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = index;
	  break;
	case 0xe:
	  /* BKPT insn.  The enhanced software breakpoint switches the
	     CPU to ARM state, disables normal interrupts and enters
	     abort mode, with PC set per the vector configuration.  To
	     reverse over it we would need the previous CPSR and PC —
	     the SPSR save is not implemented, so reject the insn.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  thumb_insn_r->reg_rec_count = 2;
	  /* We need to save SPSR value, which is not yet done.  */
	  printf_unfiltered (_("Process record does not support instruction "
			       "0x%0x at address %s.\n"),
			     thumb_insn_r->arm_insn,
			     paddress (thumb_insn_r->gdbarch,
				       thumb_insn_r->this_addr));
	  return -1;

	case 0xf:
	  /* If-Then, and hints: nothing to record.  */
	  break;
	default:
	  return -1;
	};
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12078
12079 /* Handling opcode 110 insns. */
12080
static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* Function return value: -1:record failure ;  0:success.  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA: every register in the list (bits 0-7) is loaded, and
	 the base register Rn is written back.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* STMIA: store multiple, increment after.  Memory from the base
	 address upward is overwritten, one word per listed register.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      /* Fill (size, address) pairs from the end of the buffer towards
	 the front, ascending addresses.  */
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  The syscall number is read from r7
	 (EABI convention).  */
      if (tdep->arm_syscall_record != NULL)
	{
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
    as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12162
12163 /* Handling opcode 111 insns. */
12164
12165 static int
12166 thumb_record_branch (insn_decode_record *thumb_insn_r)
12167 {
12168 uint32_t record_buf[8];
12169 uint32_t bits_h = 0;
12170
12171 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12172
12173 if (2 == bits_h || 3 == bits_h)
12174 {
12175 /* BL */
12176 record_buf[0] = ARM_LR_REGNUM;
12177 thumb_insn_r->reg_rec_count = 1;
12178 }
12179 else if (1 == bits_h)
12180 {
12181 /* BLX(1). */
12182 record_buf[0] = ARM_PS_REGNUM;
12183 record_buf[1] = ARM_LR_REGNUM;
12184 thumb_insn_r->reg_rec_count = 2;
12185 }
12186
12187 /* B(2) is automatically taken care in process_record, as PC is
12188 saved there. */
12189
12190 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12191
12192 return 0;
12193 }
12194
12195 /* Handler for thumb2 load/store multiple instructions. */
12196
static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions: every
	     listed register (bits 0-15), the base, and the flags may
	     change.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  Count the listed
	     registers (bits 0-15) to size the overwritten block.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Increment-after (STM/STMIA/STMEA): the block starts at
		 the base address itself.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Decrement-before (STMDB/STMFD): the block starts
		 REGISTER_COUNT words below the base address.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  /* Fill (size, address) pairs from the end of the buffer
	     towards the front, ascending addresses.  */
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12288
12289 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12290 instructions. */
12291
static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Opcode fields that distinguish the load/store dual, load/store
     exclusive and table-branch encodings.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: only registers change, no memory is written.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
	{
	  /* First destination register plus the status register.  */
	  reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
	  record_buf[0] = reg_dest1;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}

      /* Dual-register loads also modify a second destination.  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
	{
	  reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
	  record_buf[2] = reg_dest2;
	  thumb2_insn_r->reg_rec_count = 3;
	}
    }
  else
    {
      /* Store forms: record the memory regions about to be written.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
	{
	  /* Handle STREX.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  address = u_regval[0] + (offset_imm * 4);
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  thumb2_insn_r->mem_rec_count = 1;
	  /* The status result of the exclusive store goes into Rd.  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else if (1 == op1 && 0 == op2)
	{
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	  address = u_regval[0];
	  record_buf_mem[1] = address;

	  if (4 == op3)
	    {
	      /* Handle STREXB.  */
	      record_buf_mem[0] = 1;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (5 == op3)
	    {
	      /* Handle STREXH.  */
	      record_buf_mem[0] = 2 ;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (7 == op3)
	    {
	      /* Handle STREXD: two consecutive words are stored.  */
	      address = u_regval[0];
	      record_buf_mem[0] = 4;
	      record_buf_mem[2] = 4;
	      record_buf_mem[3] = address + 4;
	      thumb2_insn_r->mem_rec_count = 2;
	    }
	}
      else
	{
	  /* Remaining store-dual forms: the immediate offset is applied
	     only when bit 24 is set, added or subtracted per bit 23.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

	  if (bit (thumb2_insn_r->arm_insn, 24))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 23))
		offset_addr = u_regval[0] + (offset_imm * 4);
	      else
		offset_addr = u_regval[0] - (offset_imm * 4);

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];

	  /* Two words of memory plus the base register (which may be
	     written back).  */
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = address + 4;
	  thumb2_insn_r->mem_rec_count = 2;
	  record_buf[0] = reg_rn;
	  thumb2_insn_r->reg_rec_count = 1;
	}
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12405
12406 /* Handler for thumb2 data processing (shift register and modified immediate)
12407 instructions. */
12408
12409 static int
12410 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12411 {
12412 uint32_t reg_rd, op;
12413 uint32_t record_buf[8];
12414
12415 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12416 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12417
12418 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12419 {
12420 record_buf[0] = ARM_PS_REGNUM;
12421 thumb2_insn_r->reg_rec_count = 1;
12422 }
12423 else
12424 {
12425 record_buf[0] = reg_rd;
12426 record_buf[1] = ARM_PS_REGNUM;
12427 thumb2_insn_r->reg_rec_count = 2;
12428 }
12429
12430 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12431 record_buf);
12432 return ARM_RECORD_SUCCESS;
12433 }
12434
12435 /* Generic handler for thumb2 instructions which effect destination and PS
12436 registers. */
12437
12438 static int
12439 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12440 {
12441 uint32_t reg_rd;
12442 uint32_t record_buf[8];
12443
12444 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12445
12446 record_buf[0] = reg_rd;
12447 record_buf[1] = ARM_PS_REGNUM;
12448 thumb2_insn_r->reg_rec_count = 2;
12449
12450 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12451 record_buf);
12452 return ARM_RECORD_SUCCESS;
12453 }
12454
12455 /* Handler for thumb2 branch and miscellaneous control instructions. */
12456
12457 static int
12458 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12459 {
12460 uint32_t op, op1, op2;
12461 uint32_t record_buf[8];
12462
12463 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12464 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12465 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12466
12467 /* Handle MSR insn. */
12468 if (!(op1 & 0x2) && 0x38 == op)
12469 {
12470 if (!(op2 & 0x3))
12471 {
12472 /* CPSR is going to be changed. */
12473 record_buf[0] = ARM_PS_REGNUM;
12474 thumb2_insn_r->reg_rec_count = 1;
12475 }
12476 else
12477 {
12478 arm_record_unsupported_insn(thumb2_insn_r);
12479 return -1;
12480 }
12481 }
12482 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12483 {
12484 /* BLX. */
12485 record_buf[0] = ARM_PS_REGNUM;
12486 record_buf[1] = ARM_LR_REGNUM;
12487 thumb2_insn_r->reg_rec_count = 2;
12488 }
12489
12490 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12491 record_buf);
12492 return ARM_RECORD_SUCCESS;
12493 }
12494
12495 /* Handler for thumb2 store single data item instructions. */
12496
static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 selects the store width, op2 the addressing form.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: 12-bit immediate, always added to the base.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
	{
	  /* Handle STRB (register): offset is Rm shifted left by imm2
	     (bits 4-5).  */
	  reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
	  regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
	  shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
	  offset_addr = u_regval[1] << shift_imm;
	  address = u_regval[0] + offset_addr;
	}
      else
	{
	  /* 8-bit immediate form: bit 10 selects whether the offset is
	     applied to form the address, bit 9 its direction.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  if (bit (thumb2_insn_r->arm_insn, 10))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 9))
		offset_addr = u_regval[0] + offset_imm;
	      else
		offset_addr = u_regval[0] - offset_imm;

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];
	}
    }

  /* The number of bytes written is determined by op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
	record_buf_mem[0] = 1;
	break;
      /* Store half word instructions.  */
      case 1:
      case 5:
	record_buf_mem[0] = 2;
	break;
      /* Store word instructions.  */
      case 2:
      case 6:
	record_buf_mem[0] = 4;
	break;

      default:
	gdb_assert_not_reached ("no decoding pattern found");
	break;
    }

  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  /* Record the base register in case of write-back.  */
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12584
12585 /* Handler for thumb2 load memory hints instructions. */
12586
12587 static int
12588 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12589 {
12590 uint32_t record_buf[8];
12591 uint32_t reg_rt, reg_rn;
12592
12593 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12594 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12595
12596 if (ARM_PC_REGNUM != reg_rt)
12597 {
12598 record_buf[0] = reg_rt;
12599 record_buf[1] = reg_rn;
12600 record_buf[2] = ARM_PS_REGNUM;
12601 thumb2_insn_r->reg_rec_count = 3;
12602
12603 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12604 record_buf);
12605 return ARM_RECORD_SUCCESS;
12606 }
12607
12608 return ARM_RECORD_FAILURE;
12609 }
12610
12611 /* Handler for thumb2 load word instructions. */
12612
12613 static int
12614 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12615 {
12616 uint32_t record_buf[8];
12617
12618 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12619 record_buf[1] = ARM_PS_REGNUM;
12620 thumb2_insn_r->reg_rec_count = 2;
12621
12622 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12623 record_buf);
12624 return ARM_RECORD_SUCCESS;
12625 }
12626
12627 /* Handler for thumb2 long multiply, long multiply accumulate, and
12628 divide instructions. */
12629
12630 static int
12631 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12632 {
12633 uint32_t opcode1 = 0, opcode2 = 0;
12634 uint32_t record_buf[8];
12635
12636 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12637 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12638
12639 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12640 {
12641 /* Handle SMULL, UMULL, SMULAL. */
12642 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12643 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12644 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12645 record_buf[2] = ARM_PS_REGNUM;
12646 thumb2_insn_r->reg_rec_count = 3;
12647 }
12648 else if (1 == opcode1 || 3 == opcode2)
12649 {
12650 /* Handle SDIV and UDIV. */
12651 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12652 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12653 record_buf[2] = ARM_PS_REGNUM;
12654 thumb2_insn_r->reg_rec_count = 3;
12655 }
12656 else
12657 return ARM_RECORD_FAILURE;
12658
12659 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12660 record_buf);
12661 return ARM_RECORD_SUCCESS;
12662 }
12663
12664 /* Record handler for thumb32 coprocessor instructions. */
12665
12666 static int
12667 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12668 {
12669 if (bit (thumb2_insn_r->arm_insn, 25))
12670 return arm_record_coproc_data_proc (thumb2_insn_r);
12671 else
12672 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12673 }
12674
/* Record handler for advanced SIMD structure load/store instructions.  */
12676
12677 static int
12678 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12679 {
12680 struct regcache *reg_cache = thumb2_insn_r->regcache;
12681 uint32_t l_bit, a_bit, b_bits;
12682 uint32_t record_buf[128], record_buf_mem[128];
12683 uint32_t reg_rn, reg_vd, address, f_elem;
12684 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12685 uint8_t f_ebytes;
12686
12687 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12688 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12689 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12690 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12691 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12692 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12693 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12694 f_elem = 8 / f_ebytes;
12695
12696 if (!l_bit)
12697 {
12698 ULONGEST u_regval = 0;
12699 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12700 address = u_regval;
12701
12702 if (!a_bit)
12703 {
12704 /* Handle VST1. */
12705 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12706 {
12707 if (b_bits == 0x07)
12708 bf_regs = 1;
12709 else if (b_bits == 0x0a)
12710 bf_regs = 2;
12711 else if (b_bits == 0x06)
12712 bf_regs = 3;
12713 else if (b_bits == 0x02)
12714 bf_regs = 4;
12715 else
12716 bf_regs = 0;
12717
12718 for (index_r = 0; index_r < bf_regs; index_r++)
12719 {
12720 for (index_e = 0; index_e < f_elem; index_e++)
12721 {
12722 record_buf_mem[index_m++] = f_ebytes;
12723 record_buf_mem[index_m++] = address;
12724 address = address + f_ebytes;
12725 thumb2_insn_r->mem_rec_count += 1;
12726 }
12727 }
12728 }
12729 /* Handle VST2. */
12730 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12731 {
12732 if (b_bits == 0x09 || b_bits == 0x08)
12733 bf_regs = 1;
12734 else if (b_bits == 0x03)
12735 bf_regs = 2;
12736 else
12737 bf_regs = 0;
12738
12739 for (index_r = 0; index_r < bf_regs; index_r++)
12740 for (index_e = 0; index_e < f_elem; index_e++)
12741 {
12742 for (loop_t = 0; loop_t < 2; loop_t++)
12743 {
12744 record_buf_mem[index_m++] = f_ebytes;
12745 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12746 thumb2_insn_r->mem_rec_count += 1;
12747 }
12748 address = address + (2 * f_ebytes);
12749 }
12750 }
12751 /* Handle VST3. */
12752 else if ((b_bits & 0x0e) == 0x04)
12753 {
12754 for (index_e = 0; index_e < f_elem; index_e++)
12755 {
12756 for (loop_t = 0; loop_t < 3; loop_t++)
12757 {
12758 record_buf_mem[index_m++] = f_ebytes;
12759 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12760 thumb2_insn_r->mem_rec_count += 1;
12761 }
12762 address = address + (3 * f_ebytes);
12763 }
12764 }
12765 /* Handle VST4. */
12766 else if (!(b_bits & 0x0e))
12767 {
12768 for (index_e = 0; index_e < f_elem; index_e++)
12769 {
12770 for (loop_t = 0; loop_t < 4; loop_t++)
12771 {
12772 record_buf_mem[index_m++] = f_ebytes;
12773 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12774 thumb2_insn_r->mem_rec_count += 1;
12775 }
12776 address = address + (4 * f_ebytes);
12777 }
12778 }
12779 }
12780 else
12781 {
12782 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12783
12784 if (bft_size == 0x00)
12785 f_ebytes = 1;
12786 else if (bft_size == 0x01)
12787 f_ebytes = 2;
12788 else if (bft_size == 0x02)
12789 f_ebytes = 4;
12790 else
12791 f_ebytes = 0;
12792
12793 /* Handle VST1. */
12794 if (!(b_bits & 0x0b) || b_bits == 0x08)
12795 thumb2_insn_r->mem_rec_count = 1;
12796 /* Handle VST2. */
12797 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12798 thumb2_insn_r->mem_rec_count = 2;
12799 /* Handle VST3. */
12800 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12801 thumb2_insn_r->mem_rec_count = 3;
12802 /* Handle VST4. */
12803 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12804 thumb2_insn_r->mem_rec_count = 4;
12805
12806 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12807 {
12808 record_buf_mem[index_m] = f_ebytes;
12809 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12810 }
12811 }
12812 }
12813 else
12814 {
12815 if (!a_bit)
12816 {
12817 /* Handle VLD1. */
12818 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12819 thumb2_insn_r->reg_rec_count = 1;
12820 /* Handle VLD2. */
12821 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12822 thumb2_insn_r->reg_rec_count = 2;
12823 /* Handle VLD3. */
12824 else if ((b_bits & 0x0e) == 0x04)
12825 thumb2_insn_r->reg_rec_count = 3;
12826 /* Handle VLD4. */
12827 else if (!(b_bits & 0x0e))
12828 thumb2_insn_r->reg_rec_count = 4;
12829 }
12830 else
12831 {
12832 /* Handle VLD1. */
12833 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12834 thumb2_insn_r->reg_rec_count = 1;
12835 /* Handle VLD2. */
12836 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12837 thumb2_insn_r->reg_rec_count = 2;
12838 /* Handle VLD3. */
12839 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12840 thumb2_insn_r->reg_rec_count = 3;
12841 /* Handle VLD4. */
12842 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12843 thumb2_insn_r->reg_rec_count = 4;
12844
12845 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12846 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12847 }
12848 }
12849
12850 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12851 {
12852 record_buf[index_r] = reg_rn;
12853 thumb2_insn_r->reg_rec_count += 1;
12854 }
12855
12856 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12857 record_buf);
12858 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12859 record_buf_mem);
12860 return 0;
12861 }
12862
12863 /* Decodes thumb2 instruction type and invokes its record handler. */
12864
static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Major opcode fields of a 32-bit Thumb-2 instruction: op1 is
     bits 27-28, op2 is bits 20-26, op is bit 15.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
	{
	  /* Load/store multiple instruction.  */
	  return thumb2_record_ld_st_multiple (thumb2_insn_r);
	}
      else if ((op2 & 0x64) == 0x4)
	{
	  /* Load/store (dual/exclusive) and table branch instruction.  */
	  return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
	}
      else if ((op2 & 0x60) == 0x20)
	{
	  /* Data-processing (shifted register).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }
  else if (op1 == 0x02)
    {
      if (op)
	{
	  /* Branches and miscellaneous control instructions.  */
	  return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
	}
      else if (op2 & 0x20)
	{
	  /* Data-processing (plain binary immediate) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else
	{
	  /* Data-processing (modified immediate).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
	{
	  /* Store single data item.  */
	  return thumb2_record_str_single_data (thumb2_insn_r);
	}
      else if (!((op2 & 0x71) ^ 0x10))
	{
	  /* Advanced SIMD or structure load/store instructions.  */
	  return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x01))
	{
	  /* Load byte, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x03))
	{
	  /* Load halfword, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x05))
	{
	  /* Load word instruction.  */
	  return thumb2_record_ld_word (thumb2_insn_r);
	}
      else if (!((op2 & 0x70) ^ 0x20))
	{
	  /* Data-processing (register) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x30))
	{
	  /* Multiply, multiply accumulate, abs diff instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x38))
	{
	  /* Long multiply, long multiply accumulate, and divide.  */
	  return thumb2_record_lmul_lmla_div (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }

  /* NOTE(review): -1 returned through an unsigned return type; callers
     compare against ARM_RECORD_SUCCESS so this works, but a signed
     return type would be cleaner — confirm before changing.  */
  return -1;
}
12966
12967 namespace {
12968 /* Abstract memory reader. */
12969
12970 class abstract_memory_reader
12971 {
12972 public:
12973 /* Read LEN bytes of target memory at address MEMADDR, placing the
12974 results in GDB's memory at BUF. Return true on success. */
12975
12976 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12977 };
12978
12979 /* Instruction reader from real target. */
12980
12981 class instruction_reader : public abstract_memory_reader
12982 {
12983 public:
12984 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
12985 {
12986 if (target_read_memory (memaddr, buf, len))
12987 return false;
12988 else
12989 return true;
12990 }
12991 };
12992
12993 } // namespace
12994
/* Extract an arm/thumb/thumb2 insn depending on the size, and return 0 on
   success and a positive value on failure.  */
12997
12998 static int
12999 extract_arm_insn (abstract_memory_reader& reader,
13000 insn_decode_record *insn_record, uint32_t insn_size)
13001 {
13002 gdb_byte buf[insn_size];
13003
13004 memset (&buf[0], 0, insn_size);
13005
13006 if (!reader.read (insn_record->this_addr, buf, insn_size))
13007 return 1;
13008 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13009 insn_size,
13010 gdbarch_byte_order_for_code (insn_record->gdbarch));
13011 return 0;
13012 }
13013
13014 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13015
/* Decode arm/thumb insn depending on condition codes and opcodes; and
   dispatch it.  */
13018
13019 static int
13020 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13021 record_type_t record_type, uint32_t insn_size)
13022 {
13023
13024 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13025 instruction. */
13026 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13027 {
13028 arm_record_data_proc_misc_ld_str, /* 000. */
13029 arm_record_data_proc_imm, /* 001. */
13030 arm_record_ld_st_imm_offset, /* 010. */
13031 arm_record_ld_st_reg_offset, /* 011. */
13032 arm_record_ld_st_multiple, /* 100. */
13033 arm_record_b_bl, /* 101. */
13034 arm_record_asimd_vfp_coproc, /* 110. */
13035 arm_record_coproc_data_proc /* 111. */
13036 };
13037
13038 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13039 instruction. */
13040 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13041 { \
13042 thumb_record_shift_add_sub, /* 000. */
13043 thumb_record_add_sub_cmp_mov, /* 001. */
13044 thumb_record_ld_st_reg_offset, /* 010. */
13045 thumb_record_ld_st_imm_offset, /* 011. */
13046 thumb_record_ld_st_stack, /* 100. */
13047 thumb_record_misc, /* 101. */
13048 thumb_record_ldm_stm_swi, /* 110. */
13049 thumb_record_branch /* 111. */
13050 };
13051
13052 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13053 uint32_t insn_id = 0;
13054
13055 if (extract_arm_insn (reader, arm_record, insn_size))
13056 {
13057 if (record_debug)
13058 {
13059 printf_unfiltered (_("Process record: error reading memory at "
13060 "addr %s len = %d.\n"),
13061 paddress (arm_record->gdbarch,
13062 arm_record->this_addr), insn_size);
13063 }
13064 return -1;
13065 }
13066 else if (ARM_RECORD == record_type)
13067 {
13068 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13069 insn_id = bits (arm_record->arm_insn, 25, 27);
13070
13071 if (arm_record->cond == 0xf)
13072 ret = arm_record_extension_space (arm_record);
13073 else
13074 {
13075 /* If this insn has fallen into extension space
13076 then we need not decode it anymore. */
13077 ret = arm_handle_insn[insn_id] (arm_record);
13078 }
13079 if (ret != ARM_RECORD_SUCCESS)
13080 {
13081 arm_record_unsupported_insn (arm_record);
13082 ret = -1;
13083 }
13084 }
13085 else if (THUMB_RECORD == record_type)
13086 {
13087 /* As thumb does not have condition codes, we set negative. */
13088 arm_record->cond = -1;
13089 insn_id = bits (arm_record->arm_insn, 13, 15);
13090 ret = thumb_handle_insn[insn_id] (arm_record);
13091 if (ret != ARM_RECORD_SUCCESS)
13092 {
13093 arm_record_unsupported_insn (arm_record);
13094 ret = -1;
13095 }
13096 }
13097 else if (THUMB2_RECORD == record_type)
13098 {
13099 /* As thumb does not have condition codes, we set negative. */
13100 arm_record->cond = -1;
13101
13102 /* Swap first half of 32bit thumb instruction with second half. */
13103 arm_record->arm_insn
13104 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13105
13106 ret = thumb2_record_decode_insn_handler (arm_record);
13107
13108 if (ret != ARM_RECORD_SUCCESS)
13109 {
13110 arm_record_unsupported_insn (arm_record);
13111 ret = -1;
13112 }
13113 }
13114 else
13115 {
13116 /* Throw assertion. */
13117 gdb_assert_not_reached ("not a valid instruction, could not decode");
13118 }
13119
13120 return ret;
13121 }
13122
13123 #if GDB_SELF_TEST
13124 namespace selftests {
13125
13126 /* Provide both 16-bit and 32-bit thumb instructions. */
13127
13128 class instruction_reader_thumb : public abstract_memory_reader
13129 {
13130 public:
13131 template<size_t SIZE>
13132 instruction_reader_thumb (enum bfd_endian endian,
13133 const uint16_t (&insns)[SIZE])
13134 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13135 {}
13136
13137 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13138 {
13139 SELF_CHECK (len == 4 || len == 2);
13140 SELF_CHECK (memaddr % 2 == 0);
13141 SELF_CHECK ((memaddr / 2) < m_insns_size);
13142
13143 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13144 if (len == 4)
13145 {
13146 store_unsigned_integer (&buf[2], 2, m_endian,
13147 m_insns[memaddr / 2 + 1]);
13148 }
13149 return true;
13150 }
13151
13152 private:
13153 enum bfd_endian m_endian;
13154 const uint16_t *m_insns;
13155 size_t m_insns_size;
13156 };
13157
13158 static void
13159 arm_record_test (void)
13160 {
13161 struct gdbarch_info info;
13162 gdbarch_info_init (&info);
13163 info.bfd_arch_info = bfd_scan_arch ("arm");
13164
13165 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13166
13167 SELF_CHECK (gdbarch != NULL);
13168
13169 /* 16-bit Thumb instructions. */
13170 {
13171 insn_decode_record arm_record;
13172
13173 memset (&arm_record, 0, sizeof (insn_decode_record));
13174 arm_record.gdbarch = gdbarch;
13175
13176 static const uint16_t insns[] = {
13177 /* db b2 uxtb r3, r3 */
13178 0xb2db,
13179 /* cd 58 ldr r5, [r1, r3] */
13180 0x58cd,
13181 };
13182
13183 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13184 instruction_reader_thumb reader (endian, insns);
13185 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13186 THUMB_INSN_SIZE_BYTES);
13187
13188 SELF_CHECK (ret == 0);
13189 SELF_CHECK (arm_record.mem_rec_count == 0);
13190 SELF_CHECK (arm_record.reg_rec_count == 1);
13191 SELF_CHECK (arm_record.arm_regs[0] == 3);
13192
13193 arm_record.this_addr += 2;
13194 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13195 THUMB_INSN_SIZE_BYTES);
13196
13197 SELF_CHECK (ret == 0);
13198 SELF_CHECK (arm_record.mem_rec_count == 0);
13199 SELF_CHECK (arm_record.reg_rec_count == 1);
13200 SELF_CHECK (arm_record.arm_regs[0] == 5);
13201 }
13202
13203 /* 32-bit Thumb-2 instructions. */
13204 {
13205 insn_decode_record arm_record;
13206
13207 memset (&arm_record, 0, sizeof (insn_decode_record));
13208 arm_record.gdbarch = gdbarch;
13209
13210 static const uint16_t insns[] = {
13211 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13212 0xee1d, 0x7f70,
13213 };
13214
13215 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13216 instruction_reader_thumb reader (endian, insns);
13217 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13218 THUMB2_INSN_SIZE_BYTES);
13219
13220 SELF_CHECK (ret == 0);
13221 SELF_CHECK (arm_record.mem_rec_count == 0);
13222 SELF_CHECK (arm_record.reg_rec_count == 1);
13223 SELF_CHECK (arm_record.arm_regs[0] == 7);
13224 }
13225 }
13226 } // namespace selftests
13227 #endif /* GDB_SELF_TEST */
13228
13229 /* Cleans up local record registers and memory allocations. */
13230
13231 static void
13232 deallocate_reg_mem (insn_decode_record *record)
13233 {
13234 xfree (record->arm_regs);
13235 xfree (record->arm_mems);
13236 }
13237
13238
/* Parse the current instruction and record the values of the registers and
   memory that will be changed in current instruction to "record_arch_list".
   Return -1 if something is wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  /* Zero-initialize so arm_regs/arm_mems start out NULL and both record
     counts start at zero.  */
  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
			  paddress (gdbarch, arm_record.this_addr));
    }

  /* Fetch only the first two bytes: enough to tell below whether a
     Thumb instruction is the first half of a 32-bit Thumb-2 insn.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }

  if (0 == ret)
    {
      /* Record registers.  PC is always recorded.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}

      if (record_full_arch_list_add_end ())
	ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}